1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.dirnode import DirectoryNode
15 from allmydata.nodemaker import NodeMaker
16 from allmydata.unknown import UnknownNode
17 from allmydata.web import status, common
18 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
19 from allmydata.util import fileutil, base32
20 from allmydata.util.consumer import download_to_data
21 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
22 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
23 from allmydata.interfaces import IMutableFileNode
24 from allmydata.mutable import servermap, publish, retrieve
25 import common_util as testutil
26 from allmydata.test.no_network import GridTestMixin
27 from allmydata.test.common_web import HTTPClientGETFactory, \
29 from allmydata.client import Client, SecretHolder
31 # create a fake uploader/downloader, and a couple of fake dirnodes, then
32 # create a webserver that works against them
34 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
class FakeStatsProvider:
    """Stats-provider stand-in for the web 'welcome' page tests.

    The real provider aggregates node statistics; this fake just reports
    an empty stats/counters mapping so the status pages can render.
    """
    def get_stats(self):
        # NOTE(review): the wrapping method/return lines were missing from
        # this copy; restored to the conventional get_stats() shape.
        stats = {'stats': {}, 'counters': {}}
        return stats
class FakeNodeMaker(NodeMaker):
    """NodeMaker subclass that hands back in-memory fake filenodes.

    LIT and CHK caps become FakeCHKFileNode instances; mutable caps become
    FakeMutableFileNode instances, so no real storage servers are needed.
    """
    def _fresh_fake_mutable(self):
        # the fake's four constructor arguments are placeholders
        return FakeMutableFileNode(None, None, None, None)
    def _create_lit(self, cap):
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        return self._fresh_fake_mutable().init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # keysize is accepted for interface compatibility but unused here
        return self._fresh_fake_mutable().create(contents)
class FakeUploader(service.Service):
    """Uploader stand-in: 'uploads' by building an in-memory fake CHK node."""
    name = "uploader"
    def upload(self, uploadable, history=None):
        """Read the whole uploadable and return UploadResults via a Deferred."""
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # NOTE(review): the _got_data definition and return statements were
        # missing from this copy; restored from the surrounding call pattern.
        def _got_data(datav):
            data = "".join(datav)
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
            return results
        d.addCallback(_got_data)
        return d
    def get_helper_info(self):
        # the fake has no upload helper configured
        return (None, False)
class FakeHistory:
    """History stand-in: one canned status object of each kind.

    The /status web pages iterate these lists; a single entry per kind is
    enough to exercise the rendering paths.
    NOTE(review): the class-header line was missing from this copy and has
    been restored.
    """
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]

    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        # no helper in the fake, so no helper statuses
        return []
88 class FakeClient(Client):
90 # don't upcall to Client.__init__, since we only want to initialize a
92 service.MultiService.__init__(self)
93 self.nodeid = "fake_nodeid"
94 self.nickname = "fake_nickname"
95 self.introducer_furl = "None"
96 self.stats_provider = FakeStatsProvider()
97 self._secret_holder = SecretHolder("lease secret", "convergence secret")
99 self.convergence = "some random string"
100 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
101 self.introducer_client = None
102 self.history = FakeHistory()
103 self.uploader = FakeUploader()
104 self.uploader.setServiceParent(self)
105 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
106 self.uploader, None, None,
109 def startService(self):
110 return service.MultiService.startService(self)
111 def stopService(self):
112 return service.MultiService.stopService(self)
114 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
116 class WebMixin(object):
118 self.s = FakeClient()
119 self.s.startService()
120 self.staticdir = self.mktemp()
121 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
122 self.ws.setServiceParent(self.s)
123 self.webish_port = port = self.ws.listener._port.getHost().port
124 self.webish_url = "http://localhost:%d" % port
126 l = [ self.s.create_dirnode() for x in range(6) ]
127 d = defer.DeferredList(l)
129 self.public_root = res[0][1]
130 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
131 self.public_url = "/uri/" + self.public_root.get_uri()
132 self.private_root = res[1][1]
136 self._foo_uri = foo.get_uri()
137 self._foo_readonly_uri = foo.get_readonly_uri()
138 self._foo_verifycap = foo.get_verify_cap().to_string()
139 # NOTE: we ignore the deferred on all set_uri() calls, because we
140 # know the fake nodes do these synchronously
141 self.public_root.set_uri(u"foo", foo.get_uri(),
142 foo.get_readonly_uri())
144 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
145 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
146 self._bar_txt_verifycap = n.get_verify_cap().to_string()
148 foo.set_uri(u"empty", res[3][1].get_uri(),
149 res[3][1].get_readonly_uri())
150 sub_uri = res[4][1].get_uri()
151 self._sub_uri = sub_uri
152 foo.set_uri(u"sub", sub_uri, sub_uri)
153 sub = self.s.create_node_from_uri(sub_uri)
155 _ign, n, blocking_uri = self.makefile(1)
156 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
158 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
159 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
160 # still think of it as an umlaut
161 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
163 _ign, n, baz_file = self.makefile(2)
164 self._baz_file_uri = baz_file
165 sub.set_uri(u"baz.txt", baz_file, baz_file)
167 _ign, n, self._bad_file_uri = self.makefile(3)
168 # this uri should not be downloadable
169 del FakeCHKFileNode.all_contents[self._bad_file_uri]
172 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
173 rodir.get_readonly_uri())
174 rodir.set_uri(u"nor", baz_file, baz_file)
179 # public/foo/blockingfile
182 # public/foo/sub/baz.txt
184 # public/reedownlee/nor
185 self.NEWFILE_CONTENTS = "newfile contents\n"
187 return foo.get_metadata_for(u"bar.txt")
189 def _got_metadata(metadata):
190 self._bar_txt_metadata = metadata
191 d.addCallback(_got_metadata)
194 def makefile(self, number):
195 contents = "contents of file %s\n" % number
196 n = create_chk_filenode(contents)
197 return contents, n, n.get_uri()
200 return self.s.stopService()
202 def failUnlessIsBarDotTxt(self, res):
203 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
205 def failUnlessIsBarJSON(self, res):
206 data = simplejson.loads(res)
207 self.failUnless(isinstance(data, list))
208 self.failUnlessEqual(data[0], u"filenode")
209 self.failUnless(isinstance(data[1], dict))
210 self.failIf(data[1]["mutable"])
211 self.failIf("rw_uri" in data[1]) # immutable
212 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
213 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
214 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
216 def failUnlessIsFooJSON(self, res):
217 data = simplejson.loads(res)
218 self.failUnless(isinstance(data, list))
219 self.failUnlessEqual(data[0], "dirnode", res)
220 self.failUnless(isinstance(data[1], dict))
221 self.failUnless(data[1]["mutable"])
222 self.failUnless("rw_uri" in data[1]) # mutable
223 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
224 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
225 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
227 kidnames = sorted([unicode(n) for n in data[1]["children"]])
228 self.failUnlessEqual(kidnames,
229 [u"bar.txt", u"blockingfile", u"empty",
230 u"n\u00fc.txt", u"sub"])
231 kids = dict( [(unicode(name),value)
233 in data[1]["children"].iteritems()] )
234 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
235 self.failUnless("metadata" in kids[u"sub"][1])
236 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
237 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
238 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
239 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
240 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
241 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
242 self._bar_txt_verifycap)
243 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
244 self._bar_txt_metadata["ctime"])
245 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
248 def GET(self, urlpath, followRedirect=False, return_response=False,
250 # if return_response=True, this fires with (data, statuscode,
251 # respheaders) instead of just data.
252 assert not isinstance(urlpath, unicode)
253 url = self.webish_url + urlpath
254 factory = HTTPClientGETFactory(url, method="GET",
255 followRedirect=followRedirect, **kwargs)
256 reactor.connectTCP("localhost", self.webish_port, factory)
259 return (data, factory.status, factory.response_headers)
261 d.addCallback(_got_data)
262 return factory.deferred
264 def HEAD(self, urlpath, return_response=False, **kwargs):
265 # this requires some surgery, because twisted.web.client doesn't want
266 # to give us back the response headers.
267 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
268 reactor.connectTCP("localhost", self.webish_port, factory)
271 return (data, factory.status, factory.response_headers)
273 d.addCallback(_got_data)
274 return factory.deferred
276 def PUT(self, urlpath, data, **kwargs):
277 url = self.webish_url + urlpath
278 return client.getPage(url, method="PUT", postdata=data, **kwargs)
280 def DELETE(self, urlpath):
281 url = self.webish_url + urlpath
282 return client.getPage(url, method="DELETE")
284 def POST(self, urlpath, followRedirect=False, **fields):
285 sepbase = "boogabooga"
289 form.append('Content-Disposition: form-data; name="_charset"')
293 for name, value in fields.iteritems():
294 if isinstance(value, tuple):
295 filename, value = value
296 form.append('Content-Disposition: form-data; name="%s"; '
297 'filename="%s"' % (name, filename.encode("utf-8")))
299 form.append('Content-Disposition: form-data; name="%s"' % name)
301 if isinstance(value, unicode):
302 value = value.encode("utf-8")
305 assert isinstance(value, str)
312 body = "\r\n".join(form) + "\r\n"
313 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
314 return self.POST2(urlpath, body, headers, followRedirect)
316 def POST2(self, urlpath, body="", headers={}, followRedirect=False):
317 url = self.webish_url + urlpath
318 return client.getPage(url, method="POST", postdata=body,
319 headers=headers, followRedirect=followRedirect)
321 def shouldFail(self, res, expected_failure, which,
322 substring=None, response_substring=None):
323 if isinstance(res, failure.Failure):
324 res.trap(expected_failure)
326 self.failUnless(substring in str(res),
327 "substring '%s' not in '%s'"
328 % (substring, str(res)))
329 if response_substring:
330 self.failUnless(response_substring in res.value.response,
331 "response substring '%s' not in '%s'"
332 % (response_substring, res.value.response))
334 self.fail("%s was supposed to raise %s, not get '%s'" %
335 (which, expected_failure, res))
337 def shouldFail2(self, expected_failure, which, substring,
339 callable, *args, **kwargs):
340 assert substring is None or isinstance(substring, str)
341 assert response_substring is None or isinstance(response_substring, str)
342 d = defer.maybeDeferred(callable, *args, **kwargs)
344 if isinstance(res, failure.Failure):
345 res.trap(expected_failure)
347 self.failUnless(substring in str(res),
348 "%s: substring '%s' not in '%s'"
349 % (which, substring, str(res)))
350 if response_substring:
351 self.failUnless(response_substring in res.value.response,
352 "%s: response substring '%s' not in '%s'"
354 response_substring, res.value.response))
356 self.fail("%s was supposed to raise %s, not get '%s'" %
357 (which, expected_failure, res))
361 def should404(self, res, which):
362 if isinstance(res, failure.Failure):
363 res.trap(error.Error)
364 self.failUnlessEqual(res.value.status, "404")
366 self.fail("%s was supposed to Error(404), not get '%s'" %
370 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
371 def test_create(self):
374 def test_welcome(self):
377 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
379 self.s.basedir = 'web/test_welcome'
380 fileutil.make_dirs("web/test_welcome")
381 fileutil.make_dirs("web/test_welcome/private")
383 d.addCallback(_check)
386 def test_provisioning(self):
387 d = self.GET("/provisioning/")
389 self.failUnless('Tahoe Provisioning Tool' in res)
390 fields = {'filled': True,
391 "num_users": int(50e3),
392 "files_per_user": 1000,
393 "space_per_user": int(1e9),
394 "sharing_ratio": 1.0,
395 "encoding_parameters": "3-of-10-5",
397 "ownership_mode": "A",
398 "download_rate": 100,
403 return self.POST("/provisioning/", **fields)
405 d.addCallback(_check)
407 self.failUnless('Tahoe Provisioning Tool' in res)
408 self.failUnless("Share space consumed: 167.01TB" in res)
410 fields = {'filled': True,
411 "num_users": int(50e6),
412 "files_per_user": 1000,
413 "space_per_user": int(5e9),
414 "sharing_ratio": 1.0,
415 "encoding_parameters": "25-of-100-50",
416 "num_servers": 30000,
417 "ownership_mode": "E",
418 "drive_failure_model": "U",
420 "download_rate": 1000,
425 return self.POST("/provisioning/", **fields)
426 d.addCallback(_check2)
428 self.failUnless("Share space consumed: huge!" in res)
429 fields = {'filled': True}
430 return self.POST("/provisioning/", **fields)
431 d.addCallback(_check3)
433 self.failUnless("Share space consumed:" in res)
434 d.addCallback(_check4)
437 def test_reliability_tool(self):
439 from allmydata import reliability
440 _hush_pyflakes = reliability
442 raise unittest.SkipTest("reliability tool requires NumPy")
444 d = self.GET("/reliability/")
446 self.failUnless('Tahoe Reliability Tool' in res)
447 fields = {'drive_lifetime': "8Y",
452 "check_period": "1M",
453 "report_period": "3M",
456 return self.POST("/reliability/", **fields)
458 d.addCallback(_check)
460 self.failUnless('Tahoe Reliability Tool' in res)
461 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
462 self.failUnless(re.search(r, res), res)
463 d.addCallback(_check2)
466 def test_status(self):
467 h = self.s.get_history()
468 dl_num = h.list_all_download_statuses()[0].get_counter()
469 ul_num = h.list_all_upload_statuses()[0].get_counter()
470 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
471 pub_num = h.list_all_publish_statuses()[0].get_counter()
472 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
473 d = self.GET("/status", followRedirect=True)
475 self.failUnless('Upload and Download Status' in res, res)
476 self.failUnless('"down-%d"' % dl_num in res, res)
477 self.failUnless('"up-%d"' % ul_num in res, res)
478 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
479 self.failUnless('"publish-%d"' % pub_num in res, res)
480 self.failUnless('"retrieve-%d"' % ret_num in res, res)
481 d.addCallback(_check)
482 d.addCallback(lambda res: self.GET("/status/?t=json"))
483 def _check_json(res):
484 data = simplejson.loads(res)
485 self.failUnless(isinstance(data, dict))
486 active = data["active"]
487 # TODO: test more. We need a way to fake an active operation
489 d.addCallback(_check_json)
491 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
493 self.failUnless("File Download Status" in res, res)
494 d.addCallback(_check_dl)
495 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
497 self.failUnless("File Upload Status" in res, res)
498 d.addCallback(_check_ul)
499 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
500 def _check_mapupdate(res):
501 self.failUnless("Mutable File Servermap Update Status" in res, res)
502 d.addCallback(_check_mapupdate)
503 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
504 def _check_publish(res):
505 self.failUnless("Mutable File Publish Status" in res, res)
506 d.addCallback(_check_publish)
507 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
508 def _check_retrieve(res):
509 self.failUnless("Mutable File Retrieve Status" in res, res)
510 d.addCallback(_check_retrieve)
514 def test_status_numbers(self):
515 drrm = status.DownloadResultsRendererMixin()
516 self.failUnlessEqual(drrm.render_time(None, None), "")
517 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
518 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
519 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
520 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
521 self.failUnlessEqual(drrm.render_rate(None, None), "")
522 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
523 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
524 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
526 urrm = status.UploadResultsRendererMixin()
527 self.failUnlessEqual(urrm.render_time(None, None), "")
528 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
529 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
530 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
531 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
532 self.failUnlessEqual(urrm.render_rate(None, None), "")
533 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
534 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
535 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
537 def test_GET_FILEURL(self):
538 d = self.GET(self.public_url + "/foo/bar.txt")
539 d.addCallback(self.failUnlessIsBarDotTxt)
542 def test_GET_FILEURL_range(self):
543 headers = {"range": "bytes=1-10"}
544 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
545 return_response=True)
546 def _got((res, status, headers)):
547 self.failUnlessEqual(int(status), 206)
548 self.failUnless(headers.has_key("content-range"))
549 self.failUnlessEqual(headers["content-range"][0],
550 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
551 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
555 def test_GET_FILEURL_partial_range(self):
556 headers = {"range": "bytes=5-"}
557 length = len(self.BAR_CONTENTS)
558 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
559 return_response=True)
560 def _got((res, status, headers)):
561 self.failUnlessEqual(int(status), 206)
562 self.failUnless(headers.has_key("content-range"))
563 self.failUnlessEqual(headers["content-range"][0],
564 "bytes 5-%d/%d" % (length-1, length))
565 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
569 def test_HEAD_FILEURL_range(self):
570 headers = {"range": "bytes=1-10"}
571 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
572 return_response=True)
573 def _got((res, status, headers)):
574 self.failUnlessEqual(res, "")
575 self.failUnlessEqual(int(status), 206)
576 self.failUnless(headers.has_key("content-range"))
577 self.failUnlessEqual(headers["content-range"][0],
578 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
582 def test_HEAD_FILEURL_partial_range(self):
583 headers = {"range": "bytes=5-"}
584 length = len(self.BAR_CONTENTS)
585 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
586 return_response=True)
587 def _got((res, status, headers)):
588 self.failUnlessEqual(int(status), 206)
589 self.failUnless(headers.has_key("content-range"))
590 self.failUnlessEqual(headers["content-range"][0],
591 "bytes 5-%d/%d" % (length-1, length))
595 def test_GET_FILEURL_range_bad(self):
596 headers = {"range": "BOGUS=fizbop-quarnak"}
597 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
599 "Syntactically invalid http range header",
600 self.GET, self.public_url + "/foo/bar.txt",
604 def test_HEAD_FILEURL(self):
605 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
606 def _got((res, status, headers)):
607 self.failUnlessEqual(res, "")
608 self.failUnlessEqual(headers["content-length"][0],
609 str(len(self.BAR_CONTENTS)))
610 self.failUnlessEqual(headers["content-type"], ["text/plain"])
614 def test_GET_FILEURL_named(self):
615 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
616 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
617 d = self.GET(base + "/@@name=/blah.txt")
618 d.addCallback(self.failUnlessIsBarDotTxt)
619 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
620 d.addCallback(self.failUnlessIsBarDotTxt)
621 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
622 d.addCallback(self.failUnlessIsBarDotTxt)
623 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
624 d.addCallback(self.failUnlessIsBarDotTxt)
625 save_url = base + "?save=true&filename=blah.txt"
626 d.addCallback(lambda res: self.GET(save_url))
627 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
628 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
629 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
630 u_url = base + "?save=true&filename=" + u_fn_e
631 d.addCallback(lambda res: self.GET(u_url))
632 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
635 def test_PUT_FILEURL_named_bad(self):
636 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
637 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
639 "/file can only be used with GET or HEAD",
640 self.PUT, base + "/@@name=/blah.txt", "")
643 def test_GET_DIRURL_named_bad(self):
644 base = "/file/%s" % urllib.quote(self._foo_uri)
645 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
648 self.GET, base + "/@@name=/blah.txt")
651 def test_GET_slash_file_bad(self):
652 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
654 "/file must be followed by a file-cap and a name",
658 def test_GET_unhandled_URI_named(self):
659 contents, n, newuri = self.makefile(12)
660 verifier_cap = n.get_verify_cap().to_string()
661 base = "/file/%s" % urllib.quote(verifier_cap)
662 # client.create_node_from_uri() can't handle verify-caps
663 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
664 "400 Bad Request", "is not a file-cap",
668 def test_GET_unhandled_URI(self):
669 contents, n, newuri = self.makefile(12)
670 verifier_cap = n.get_verify_cap().to_string()
671 base = "/uri/%s" % urllib.quote(verifier_cap)
672 # client.create_node_from_uri() can't handle verify-caps
673 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
675 "GET unknown URI type: can only do t=info",
679 def test_GET_FILE_URI(self):
680 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
682 d.addCallback(self.failUnlessIsBarDotTxt)
685 def test_GET_FILE_URI_badchild(self):
686 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
687 errmsg = "Files have no children, certainly not named 'boguschild'"
688 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
689 "400 Bad Request", errmsg,
693 def test_PUT_FILE_URI_badchild(self):
694 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
695 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
696 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
697 "400 Bad Request", errmsg,
701 def test_GET_FILEURL_save(self):
702 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
703 # TODO: look at the headers, expect a Content-Disposition: attachment
705 d.addCallback(self.failUnlessIsBarDotTxt)
708 def test_GET_FILEURL_missing(self):
709 d = self.GET(self.public_url + "/foo/missing")
710 d.addBoth(self.should404, "test_GET_FILEURL_missing")
713 def test_PUT_overwrite_only_files(self):
714 # create a directory, put a file in that directory.
715 contents, n, filecap = self.makefile(8)
716 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
717 d.addCallback(lambda res:
718 self.PUT(self.public_url + "/foo/dir/file1.txt",
719 self.NEWFILE_CONTENTS))
720 # try to overwrite the file with replace=only-files
722 d.addCallback(lambda res:
723 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
725 d.addCallback(lambda res:
726 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
727 "There was already a child by that name, and you asked me "
729 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
733 def test_PUT_NEWFILEURL(self):
734 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
735 # TODO: we lose the response code, so we can't check this
736 #self.failUnlessEqual(responsecode, 201)
737 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
738 d.addCallback(lambda res:
739 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
740 self.NEWFILE_CONTENTS))
743 def test_PUT_NEWFILEURL_not_mutable(self):
744 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
745 self.NEWFILE_CONTENTS)
746 # TODO: we lose the response code, so we can't check this
747 #self.failUnlessEqual(responsecode, 201)
748 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
749 d.addCallback(lambda res:
750 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
751 self.NEWFILE_CONTENTS))
754 def test_PUT_NEWFILEURL_range_bad(self):
755 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
756 target = self.public_url + "/foo/new.txt"
757 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
758 "501 Not Implemented",
759 "Content-Range in PUT not yet supported",
760 # (and certainly not for immutable files)
761 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
763 d.addCallback(lambda res:
764 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
767 def test_PUT_NEWFILEURL_mutable(self):
768 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
769 self.NEWFILE_CONTENTS)
770 # TODO: we lose the response code, so we can't check this
771 #self.failUnlessEqual(responsecode, 201)
773 u = uri.from_string_mutable_filenode(res)
774 self.failUnless(u.is_mutable())
775 self.failIf(u.is_readonly())
777 d.addCallback(_check_uri)
778 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
779 d.addCallback(lambda res:
780 self.failUnlessMutableChildContentsAre(self._foo_node,
782 self.NEWFILE_CONTENTS))
785 def test_PUT_NEWFILEURL_mutable_toobig(self):
786 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
787 "413 Request Entity Too Large",
788 "SDMF is limited to one segment, and 10001 > 10000",
790 self.public_url + "/foo/new.txt?mutable=true",
791 "b" * (self.s.MUTABLE_SIZELIMIT+1))
794 def test_PUT_NEWFILEURL_replace(self):
795 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
796 # TODO: we lose the response code, so we can't check this
797 #self.failUnlessEqual(responsecode, 200)
798 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
799 d.addCallback(lambda res:
800 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
801 self.NEWFILE_CONTENTS))
804 def test_PUT_NEWFILEURL_bad_t(self):
805 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
806 "PUT to a file: bad t=bogus",
807 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
811 def test_PUT_NEWFILEURL_no_replace(self):
812 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
813 self.NEWFILE_CONTENTS)
814 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
816 "There was already a child by that name, and you asked me "
820 def test_PUT_NEWFILEURL_mkdirs(self):
821 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
823 d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
824 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
825 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
826 d.addCallback(lambda res:
827 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
828 self.NEWFILE_CONTENTS))
831 def test_PUT_NEWFILEURL_blocked(self):
832 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
833 self.NEWFILE_CONTENTS)
834 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
836 "Unable to create directory 'blockingfile': a file was in the way")
839 def test_PUT_NEWFILEURL_emptyname(self):
840 # an empty pathname component (i.e. a double-slash) is disallowed
841 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
843 "The webapi does not allow empty pathname components",
844 self.PUT, self.public_url + "/foo//new.txt", "")
847 def test_DELETE_FILEURL(self):
848 d = self.DELETE(self.public_url + "/foo/bar.txt")
849 d.addCallback(lambda res:
850 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
853 def test_DELETE_FILEURL_missing(self):
854 d = self.DELETE(self.public_url + "/foo/missing")
855 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
858 def test_DELETE_FILEURL_missing2(self):
859 d = self.DELETE(self.public_url + "/missing/missing")
860 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
863 def failUnlessHasBarDotTxtMetadata(self, res):
864 data = simplejson.loads(res)
865 self.failUnless(isinstance(data, list))
866 self.failUnless(data[1].has_key("metadata"))
867 self.failUnless(data[1]["metadata"].has_key("ctime"))
868 self.failUnless(data[1]["metadata"].has_key("mtime"))
869 self.failUnlessEqual(data[1]["metadata"]["ctime"],
870 self._bar_txt_metadata["ctime"])
872 def test_GET_FILEURL_json(self):
873 # twisted.web.http.parse_qs ignores any query args without an '=', so
874 # I can't do "GET /path?json", I have to do "GET /path/t=json"
875 # instead. This may make it tricky to emulate the S3 interface
877 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
879 self.failUnlessIsBarJSON(data)
880 self.failUnlessHasBarDotTxtMetadata(data)
882 d.addCallback(_check1)
885 def test_GET_FILEURL_json_missing(self):
886 d = self.GET(self.public_url + "/foo/missing?json")
887 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
890 def test_GET_FILEURL_uri(self):
891 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
893 self.failUnlessEqual(res, self._bar_txt_uri)
894 d.addCallback(_check)
895 d.addCallback(lambda res:
896 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
898 # for now, for files, uris and readonly-uris are the same
899 self.failUnlessEqual(res, self._bar_txt_uri)
900 d.addCallback(_check2)
903 def test_GET_FILEURL_badtype(self):
904 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
907 self.public_url + "/foo/bar.txt?t=bogus")
910 def test_GET_FILEURL_uri_missing(self):
911 d = self.GET(self.public_url + "/foo/missing?t=uri")
912 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
    def test_GET_DIRURL(self):
        """GET on a directory renders an HTML listing with correct relative links.

        Checks the welcome-page link, the FILE row for bar.txt, the delete
        form's relative action= target, the DIR row for 'sub', a read-only
        directory view, a DIR-RO row, and the empty-directory page.
        """
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        d = self.GET(self.public_url + "/foo", followRedirect=True)
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                self.fail("unable to find delete-bar.txt line", res)
            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td>DIR</td>')
                       +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)
        # look at a directory which is readonly
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
            self.failUnless("(read-only)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)
        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
            self.failUnless(re.search('<td>DIR-RO</td>'
                                      r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)
        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
            self.failUnless("directory is empty" in res, res)
            MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
            self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)
    def test_GET_DIRURL_badtype(self):
        """An unrecognized t= query argument on a directory URL should error."""
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 self.public_url + "/foo?t=bogus")
    def test_GET_DIRURL_json(self):
        """?t=json on a directory returns the expected JSON description of foo/."""
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
    def test_POST_DIRURL_manifest_no_ophandle(self):
        """t=start-manifest without ophandle= is rejected: slow operations need a handle."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_manifest_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-manifest")
    def test_POST_DIRURL_manifest(self):
        """t=start-manifest produces a manifest retrievable as HTML, text, and JSON.

        Starts the operation under ophandle=125, waits for it, then checks each
        output format and the bare GET of /operations/125.
        """
        d = defer.succeed(None)
        def getman(ignored, output):
            # start (or re-fetch) the manifest operation under handle 125
            d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
                          followRedirect=True)
            d.addCallback(self.wait_for_operation, "125")
            d.addCallback(self.get_operation_results, "125", output)
        d.addCallback(getman, None)
        def _got_html(manifest):
            self.failUnless("Manifest of SI=" in manifest)
            self.failUnless("<td>sub</td>" in manifest)
            self.failUnless(self._sub_uri in manifest)
            self.failUnless("<td>sub/baz.txt</td>" in manifest)
        d.addCallback(_got_html)

        # both t=status and unadorned GET should be identical
        d.addCallback(lambda res: self.GET("/operations/125"))
        d.addCallback(_got_html)

        d.addCallback(getman, "html")
        d.addCallback(_got_html)
        d.addCallback(getman, "text")
        def _got_text(manifest):
            self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
            self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
        d.addCallback(_got_text)
        d.addCallback(getman, "JSON")
            data = res["manifest"]
            # JSON manifest is a list of (path-component-list, cap) pairs
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
            self.failUnless((u"sub",u"baz.txt") in got)
            self.failUnless("finished" in res)
            self.failUnless("origin" in res)
            self.failUnless("storage-index" in res)
            self.failUnless("verifycaps" in res)
            self.failUnless("stats" in res)
        d.addCallback(_got_json)
    def test_POST_DIRURL_deepsize_no_ophandle(self):
        """t=start-deep-size without ophandle= is rejected."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepsize_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-size")
    def test_POST_DIRURL_deepsize(self):
        """t=start-deep-size reports a plausible total size in both JSON and text output."""
        d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "126")
        d.addCallback(self.get_operation_results, "126", "json")
        def _got_json(data):
            self.failUnlessEqual(data["finished"], True)
            # exact size varies with directory encoding; just check magnitude
            self.failUnless(size > 1000)
        d.addCallback(_got_json)
        d.addCallback(self.get_operation_results, "126", "text")
            mo = re.search(r'^size: (\d+)$', res, re.M)
            self.failUnless(mo, res)
            size = int(mo.group(1))
            # with directories, the size varies.
            self.failUnless(size > 1000)
        d.addCallback(_got_text)
    def test_POST_DIRURL_deepstats_no_ophandle(self):
        """t=start-deep-stats without ophandle= is rejected."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepstats_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-stats")
    def test_POST_DIRURL_deepstats(self):
        """t=start-deep-stats returns the expected per-category counts for foo/."""
        d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "127")
        d.addCallback(self.get_operation_results, "127", "json")
        def _got_json(stats):
            # deterministic counts for the fixture tree; sizes of directories
            # vary with encoding, so those are left commented out
            expected = {"count-immutable-files": 3,
                        "count-mutable-files": 0,
                        "count-literal-files": 0,
                        "count-directories": 3,
                        "size-immutable-files": 57,
                        "size-literal-files": 0,
                        #"size-directories": 1912, # varies
                        #"largest-directory": 1590,
                        "largest-directory-children": 5,
                        "largest-immutable-file": 19,
            for k,v in expected.iteritems():
                self.failUnlessEqual(stats[k], v,
                                     "stats[%s] was %s, not %s" %
            self.failUnlessEqual(stats["size-files-histogram"],
        d.addCallback(_got_json)
    def test_POST_DIRURL_stream_manifest(self):
        """t=stream-manifest emits newline-delimited JSON units ending with a stats unit."""
        d = self.POST(self.public_url + "/foo/?t=stream-manifest")
            self.failUnless(res.endswith("\n"))
            # one JSON object per line; the final unit is the stats summary
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 7)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self._foo_uri)
            self.failUnlessEqual(first["type"], "directory")
            baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
            self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
            self.failIfEqual(baz["storage-index"], None)
            self.failIfEqual(baz["verifycap"], None)
            self.failIfEqual(baz["repaircap"], None)
        d.addCallback(_check)
    def test_GET_DIRURL_uri(self):
        """?t=uri on a directory returns the directory's write URI."""
        d = self.GET(self.public_url + "/foo?t=uri")
            self.failUnlessEqual(res, self._foo_uri)
        d.addCallback(_check)
    def test_GET_DIRURL_readonly_uri(self):
        """?t=readonly-uri on a directory returns the read-only URI."""
        d = self.GET(self.public_url + "/foo?t=readonly-uri")
            self.failUnlessEqual(res, self._foo_readonly_uri)
        d.addCallback(_check)
    def test_PUT_NEWDIRURL(self):
        """PUT ?t=mkdir creates an empty child directory named 'newdir'."""
        d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_NEWDIRURL(self):
        """POST ?t=mkdir creates an empty child directory named 'newdir'."""
        d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_NEWDIRURL_emptyname(self):
        """A double slash (empty path component) in a mkdir URL is rejected."""
        # an empty pathname component (i.e. a double-slash) is disallowed
        d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
                             "The webapi does not allow empty pathname components, i.e. a double slash",
                             self.POST, self.public_url + "//?t=mkdir")
    def test_POST_NEWDIRURL_initial_children(self):
        """t=mkdir-with-children creates a directory pre-populated from a JSON body."""
        (newkids, filecap1, filecap2, filecap3,
         dircap) = self._create_initial_children()
        d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
                       simplejson.dumps(newkids))
            # response body is the new directory's cap
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-imm", filecap1))
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-mutable",
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-mutable-ro",
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"dirchild", dircap))
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessChildURIIs, u"child-imm", filecap1)
    def test_POST_NEWDIRURL_immutable(self):
        """t=mkdir-immutable creates an immutable directory with the given children."""
        (newkids, filecap1, immdircap) = self._create_immutable_children()
        d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
                       simplejson.dumps(newkids))
            # response body is the new immutable directory's cap
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-imm", filecap1))
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"dirchild-imm",
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessChildURIIs, u"child-imm", filecap1)
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessChildURIIs, u"dirchild-imm", immdircap)
        d.addErrback(self.explain_web_error)
    def test_POST_NEWDIRURL_immutable_bad(self):
        """t=mkdir-immutable must reject children that are not themselves immutable."""
        (newkids, filecap1, filecap2, filecap3,
         dircap) = self._create_initial_children()
        d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
                             "a mkdir-immutable operation was given a child that was not itself immutable",
                             self.public_url + "/foo/newdir?t=mkdir-immutable",
                             simplejson.dumps(newkids))
    def test_PUT_NEWDIRURL_exists(self):
        """mkdir on an already-existing directory succeeds and leaves contents intact."""
        d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_blocked(self):
        """mkdir through a path blocked by an existing file yields 409 Conflict."""
        d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                             "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                             self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
        # existing tree must be unchanged
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_mkdir_p(self):
        """t=mkdir-p creates intermediate directories and is idempotent on re-run."""
        d = defer.succeed(None)
        d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
        d.addCallback(lambda res: self._foo_node.get(u"mkp"))
        def mkdir_p(mkpnode):
            url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
            def made_subsub(ssuri):
                d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
                d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
                # repeating mkdir-p must return the same (existing) directory
                d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            d.addCallback(made_subsub)
        d.addCallback(mkdir_p)
    def test_PUT_NEWDIRURL_mkdirs(self):
        """PUT ?t=mkdir with a multi-level path creates the intermediate directory too."""
        d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
        # 'newdir' lives under 'subdir', not directly under foo/
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
        d.addCallback(lambda res:
                      self._foo_node.get_child_at_path(u"subdir/newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_DELETE_DIRURL(self):
        """DELETE on a directory URL unlinks it from its parent."""
        d = self.DELETE(self.public_url + "/foo")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self.public_root, u"foo"))
    def test_DELETE_DIRURL_missing(self):
        """DELETE on a nonexistent child 404s and leaves the parent untouched."""
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self.public_root, u"foo"))
    def test_DELETE_DIRURL_missing2(self):
        """DELETE on a nonexistent top-level child should 404."""
        d = self.DELETE(self.public_url + "/missing")
        d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    def dump_root(self):
        """Debug helper: walk the public root and visit every child node."""
        w = webish.DirnodeWalkerMixin()
        def visitor(childpath, childnode, metadata):
        d = w.walk(self.public_root, visitor)
    def failUnlessNodeKeysAre(self, node, expected_keys):
        """Assert that `node` has exactly the children named in `expected_keys` (unicode)."""
        for k in expected_keys:
            assert isinstance(k, unicode)
        def _check(children):
            self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
        d.addCallback(_check)
    def failUnlessNodeHasChild(self, node, name):
        """Assert that `node` has a child named `name` (unicode)."""
        assert isinstance(name, unicode)
        def _check(children):
            self.failUnless(name in children)
        d.addCallback(_check)
    def failIfNodeHasChild(self, node, name):
        """Assert that `node` has NO child named `name` (unicode)."""
        assert isinstance(name, unicode)
        def _check(children):
            self.failIf(name in children)
        d.addCallback(_check)
    def failUnlessChildContentsAre(self, node, name, expected_contents):
        """Assert that the (immutable) child at path `name` downloads to `expected_contents`."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        d.addCallback(lambda node: download_to_data(node))
        def _check(contents):
            self.failUnlessEqual(contents, expected_contents)
        d.addCallback(_check)
    def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
        """Assert that the mutable child at path `name` has best-version `expected_contents`."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        d.addCallback(lambda node: node.download_best_version())
        def _check(contents):
            self.failUnlessEqual(contents, expected_contents)
        d.addCallback(_check)
    def failUnlessChildURIIs(self, node, name, expected_uri):
        """Assert that the child at path `name` has URI `expected_uri` (whitespace-stripped)."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        d.addCallback(_check)
    def failUnlessURIMatchesChild(self, got_uri, node, name):
        """Assert that a URI returned by the webapi matches the child at path `name`."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            self.failUnlessEqual(got_uri.strip(), child.get_uri())
        d.addCallback(_check)
1364 def failUnlessCHKURIHasContents(self, got_uri, contents):
1365 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
    def test_POST_upload(self):
        """POST t=upload with a file field adds the file as a child of foo/."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_unicode(self):
        """Uploading a file whose name contains non-ASCII characters works end-to-end."""
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=(filename, self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        # the URL path carries the name UTF-8-encoded
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_unicode_named(self):
        """A name= form field overrides the browser-supplied filename, including unicode."""
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("overridden", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_no_link(self):
        """POST /uri?t=upload (no directory link) returns an upload-results page with the new URI."""
        d = self.POST("/uri", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check_upload_results(page):
            # this should be a page which describes the results of the upload
            # that just finished.
            self.failUnless("Upload Results:" in page)
            self.failUnless("URI:" in page)
            uri_re = re.compile("URI: <tt><span>(.*)</span>")
            mo = uri_re.search(page)
            self.failUnless(mo, page)
            new_uri = mo.group(1)
        d.addCallback(_check_upload_results)
        d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
    def test_POST_upload_no_link_whendone(self):
        """when_done= on an unlinked upload redirects to the given URL afterwards."""
        d = self.POST("/uri", t="upload", when_done="/",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/")
    def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
        """Invoke `callable` and assert it redirects; pass (statuscode, target) to `checker`.

        `which` labels the failure message if the callable returns normally
        instead of raising error.PageRedirect.
        """
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(error.PageRedirect)
                statuscode = res.value.status
                target = res.value.location
                return checker(statuscode, target)
            self.fail("%s: callable was supposed to redirect, not return '%s'"
    def test_POST_upload_no_link_whendone_results(self):
        """when_done=/uri/%(uri)s interpolates the new file's URI into the redirect target."""
        def check(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.startswith(self.webish_url), target)
            # fetch the redirect target; it should serve the uploaded bytes
            return client.getPage(target, method="GET")
        d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                                 self.POST, "/uri", t="upload",
                                 when_done="/uri/%(uri)s",
                                 file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    def test_POST_upload_no_link_mutable(self):
        """Unlinked upload with mutable=true returns an SSK filecap usable via /uri and /file."""
        d = self.POST("/uri", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check(filecap):
            filecap = filecap.strip()
            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
            self.filecap = filecap
            u = uri.WriteableSSKFileURI.init_from_string(filecap)
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(filecap)
            return n.download_best_version()
        d.addCallback(_check)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/uri/%s" % urllib.quote(self.filecap))
        d.addCallback(_check2)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/file/%s" % urllib.quote(self.filecap))
        d.addCallback(_check3)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        d.addCallback(_check4)
    def test_POST_upload_no_link_mutable_toobig(self):
        """A mutable upload exceeding MUTABLE_SIZELIMIT is rejected with 413."""
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_no_link_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             "/uri", t="upload", mutable="true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def test_POST_upload_mutable(self):
        """End-to-end lifecycle of a mutable file uploaded through the webapi.

        Creates it with POST mutable=true, overwrites via POST and PUT
        (verifying the URI stays stable), checks the directory listing, the
        JSON forms, t=uri/t=readonly-uri, direct /uri/URI access, HEAD
        headers, and finally that an oversize overwrite fails with 413.
        """
        # this creates a mutable file
        d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            # remember the node/URI so later overwrites can be compared
            self._mutable_node = newnode
            self._mutable_uri = newnode.get_uri()

        # now upload it again and make sure that the URI doesn't change
        NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
        d.addCallback(lambda res:
                      self.POST(self.public_url + "/foo", t="upload",
                                file=("new.txt", NEWER_CONTENTS)))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got2)

        # upload a second time, using PUT instead of POST
        NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",

        # finally list the directory, since mutable files are displayed
        # slightly differently

        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/",
                               followRedirect=True))
        def _check_page(res):
            # TODO: assert more about the contents
            self.failUnless("SSK" in res)
        d.addCallback(_check_page)

        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got3)

        # look at the JSON form of the enclosing directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/?t=json",
                               followRedirect=True))
        def _check_page_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "dirnode")
            children = dict( [(unicode(name),value)
                              in parsed[1]["children"].iteritems()] )
            self.failUnless("new.txt" in children)
            new_json = children["new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
        d.addCallback(_check_page_json)

        # and the JSON form of the file
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=json"))
        def _check_file_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
        d.addCallback(_check_file_json)

        # and look at t=uri and t=readonly-uri
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=uri"))
        d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
        def _check_ro_uri(res):
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(res, ro_uri)
        d.addCallback(_check_ro_uri)

        # make sure we can get to it from /uri/URI
        d.addCallback(lambda res:
                      self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, NEW2_CONTENTS))

        # and that HEAD computes the size correctly
        d.addCallback(lambda res:
                      self.HEAD(self.public_url + "/foo/new.txt",
                                return_response=True))
        def _got_headers((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(NEW2_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got_headers)

        # make sure that size errors are displayed correctly for overwrite
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error,
                                       "test_POST_upload_mutable-toobig",
                                       "413 Request Entity Too Large",
                                       "SDMF is limited to one segment, and 10001 > 10000",
                                       self.public_url + "/foo", t="upload",
                                       "b" * (self.s.MUTABLE_SIZELIMIT+1)),

        d.addErrback(self.dump_error)
    def test_POST_upload_mutable_toobig(self):
        """Creating a mutable file larger than MUTABLE_SIZELIMIT fails with 413."""
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo",
                             t="upload", mutable="true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def dump_error(self, f):
        """Errback helper: print the HTTP response body hidden inside a web error Failure."""
        # if the web server returns an error code (like 400 Bad Request),
        # web.client.getPage puts the HTTP response body into the .response
        # attribute of the exception object that it gives back. It does not
        # appear in the Failure's repr(), so the ERROR that trial displays
        # will be rather terse and unhelpful. addErrback this method to the
        # end of your chain to get more information out of these errors.
        if f.check(error.Error):
            print "web.error.Error:"
            print f.value.response
    def test_POST_upload_replace(self):
        """Uploading over an existing child name replaces its contents by default."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_ok(self):
        """replace=false is fine when the target name does not already exist."""
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
        d.addCallback(lambda res: self.failUnlessEqual(res,
                                                       self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_queryarg(self):
        """replace=false as a query argument refuses to overwrite an existing child."""
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldFail, error.Error,
                  "POST_upload_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        # original bar.txt must be untouched
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_no_replace_field(self):
        """replace=false as a form field refuses to overwrite an existing child."""
        d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        # original bar.txt must be untouched
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_whendone(self):
        """when_done= on a linked upload redirects after the file is added."""
        d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/THERE")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named(self):
        """A name= field supplies the child name when the file field has no filename."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="new.txt", file=self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named_badfilename(self):
        """A name= containing a slash is rejected, and the directory stays unchanged."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_upload_named_badfilename",
                  "name= may not contain a slash",
        # make sure that nothing was added
        d.addCallback(lambda res:
                      self.failUnlessNodeKeysAre(self._foo_node,
                                                 [u"bar.txt", u"blockingfile",
                                                  u"empty", u"n\u00fc.txt",
    def test_POST_FILEURL_check(self):
        """t=check on a file: HTML result, when_done redirect, return_to link, JSON output."""
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)

        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_FILEURL_check_and_repair(self):
        """t=check&repair=true on a file reports Healthy and honors when_done/return_to."""
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check", repair="true")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def test_POST_DIRURL_check(self):
        """t=check on a directory: HTML result, when_done redirect, return_to link, JSON output."""
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res, res)
            self.failUnless("Return to file/directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)

        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_DIRURL_check_and_repair(self):
        """t=check&repair=true on a directory reports Healthy and honors when_done/return_to."""
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check", repair="true")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file/directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def wait_for_operation(self, ignored, ophandle):
        """Poll /operations/<ophandle> (JSON status) until the operation reports finished."""
        url = "/operations/" + ophandle
        url += "?t=status&output=JSON"
            data = simplejson.loads(res)
            if not data["finished"]:
                # not done yet: stall briefly, then poll again
                d = self.stall(delay=1.0)
                d.addCallback(self.wait_for_operation, ophandle)
    def get_operation_results(self, ignored, ophandle, output=None):
        """Fetch /operations/<ophandle> results; parse as JSON when output is 'json'."""
        url = "/operations/" + ophandle
            url += "&output=" + output
            if output and output.lower() == "json":
                return simplejson.loads(res)
    def test_POST_DIRURL_deepcheck_no_ophandle(self):
        """t=start-deep-check without ophandle= is rejected."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepcheck_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-check")
    def test_POST_DIRURL_deepcheck(self):
        """t=start-deep-check redirects to /operations/123 and checks all 8 objects.

        Verifies the JSON summary, the HTML summary (with and without a
        trailing slash), a 404 for a bogus per-SI detail page, and the
        per-storage-index JSON detail for the foo directory.
        """
        def _check_redirect(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.endswith("/operations/123"))
        d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                                 self.POST, self.public_url,
                                 t="start-deep-check", ophandle="123")
        d.addCallback(self.wait_for_operation, "123")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy"], 8)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "123", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy: <span>8</span>" in res)
        d.addCallback(_check_html)

        d.addCallback(lambda res:
                      self.GET("/operations/123/"))
        d.addCallback(_check_html) # should be the same as without the slash

        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "one", "404 Not Found",
                                       "No detailed results for SI bogus",
                                       self.GET, "/operations/123/bogus"))

        foo_si = self._foo_node.get_storage_index()
        foo_si_s = base32.b2a(foo_si)
        d.addCallback(lambda res:
                      self.GET("/operations/123/%s?output=JSON" % foo_si_s))
        def _check_foo_json(res):
            data = simplejson.loads(res)
            self.failUnlessEqual(data["storage-index"], foo_si_s)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_foo_json)
# Deep-check with repair=true via ophandle 124: all 8 objects healthy before
# and after, zero repairs attempted; verifies both JSON counters and the HTML
# rendering of the same numbers.
# NOTE(review): elided lines 1950/1954/1958 were blank separators in the HTML
# checks; the trailing "return d" is not shown in this listing.
1930 def test_POST_DIRURL_deepcheck_and_repair(self):
1931 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1932 ophandle="124", output="json", followRedirect=True)
1933 d.addCallback(self.wait_for_operation, "124")
1934 def _check_json(data):
1935 self.failUnlessEqual(data["finished"], True)
1936 self.failUnlessEqual(data["count-objects-checked"], 8)
1937 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1938 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1939 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1940 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1941 self.failUnlessEqual(data["count-repairs-successful"], 0)
1942 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1943 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1944 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1945 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1946 d.addCallback(_check_json)
1947 d.addCallback(self.get_operation_results, "124", "html")
1948 def _check_html(res):
1949 self.failUnless("Objects Checked: <span>8</span>" in res)
1951 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1952 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1953 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1955 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1956 self.failUnless("Repairs Successful: <span>0</span>" in res)
1957 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1959 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1960 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1961 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1962 d.addCallback(_check_html)
# POST to a file URL with an unknown t= must return 400 Bad Request.
# NOTE(review): the keyword argument carrying the bogus t= value (elided
# originals 1969-1970) and "return d" are missing from this listing.
1965 def test_POST_FILEURL_bad_t(self):
1966 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1967 "POST to file: bad t=bogus",
1968 self.POST, self.public_url + "/foo/bar.txt",
# POST t=mkdir&name=newdir to /foo: the new child exists and is empty.
# NOTE(review): trailing "return d" (elided original 1976) not shown.
1972 def test_POST_mkdir(self): # return value?
1973 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1974 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1975 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir-with-children: POST a JSON children map; the new dir must contain
# exactly those keys and child-imm must carry filecap1.
# NOTE(review): trailing "return d" (elided original 1989) not shown.
1978 def test_POST_mkdir_initial_children(self):
1979 newkids, filecap1, ign, ign, ign = self._create_initial_children()
1980 d = self.POST2(self.public_url +
1981 "/foo?t=mkdir-with-children&name=newdir",
1982 simplejson.dumps(newkids))
1983 d.addCallback(lambda res:
1984 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1985 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1986 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1987 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1988 d.addCallback(self.failUnlessChildURIIs, u"child-imm", filecap1)
# t=mkdir-immutable with all-immutable children succeeds; verify both the
# immutable file child and the immutable directory child caps.
# NOTE(review): trailing "return d" (elided original 2004) not shown.
1991 def test_POST_mkdir_immutable(self):
1992 (newkids, filecap1, immdircap) = self._create_immutable_children()
1993 d = self.POST2(self.public_url +
1994 "/foo?t=mkdir-immutable&name=newdir",
1995 simplejson.dumps(newkids))
1996 d.addCallback(lambda res:
1997 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1998 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1999 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2000 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2001 d.addCallback(self.failUnlessChildURIIs, u"child-imm", filecap1)
2002 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2003 d.addCallback(self.failUnlessChildURIIs, u"dirchild-imm", immdircap)
# t=mkdir-immutable given mutable children must be rejected with the
# "not itself immutable" error.
# NOTE(review): elided lines hid the expected status, the self.POST2 callable
# argument, and "return d".
2006 def test_POST_mkdir_immutable_bad(self):
2007 (newkids, filecap1, filecap2, filecap3,
2008 dircap) = self._create_initial_children()
2009 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2011 "a mkdir-immutable operation was given a child that was not itself immutable",
2014 "/foo?t=mkdir-immutable&name=newdir",
2015 simplejson.dumps(newkids))
# POST to /foo/newdir?t=mkdir (path form, empty body): creates an empty child.
# NOTE(review): trailing "return d" (elided original 2024) not shown.
2018 def test_POST_mkdir_2(self):
2019 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2020 d.addCallback(lambda res:
2021 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2022 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2023 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir on a two-deep missing path: intermediate "bardir" is created too,
# and newdir inside it is empty.
# NOTE(review): trailing "return d" (elided original 2033) not shown.
2026 def test_POST_mkdirs_2(self):
2027 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2028 d.addCallback(lambda res:
2029 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2030 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2031 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2032 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST /uri?t=mkdir: the response body must parse as a DirectoryURI string.
# NOTE(review): trailing "return d" (elided original 2040) not shown.
2035 def test_POST_mkdir_no_parentdir_noredirect(self):
2036 d = self.POST("/uri?t=mkdir")
2037 def _after_mkdir(res):
2038 uri.DirectoryURI.init_from_string(res)
2039 d.addCallback(_after_mkdir)
# POST /uri?t=mkdir&redirect_to_result=true: expect a 303 whose (unquoted)
# target starts with "uri/URI:DIR2:".
# NOTE(review): trailing "return d" (elided original 2049) not shown.
2042 def test_POST_mkdir_no_parentdir_redirect(self):
2043 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2044 d.addBoth(self.shouldRedirect, None, statuscode='303')
2045 def _check_target(target):
2046 target = urllib.unquote(target)
2047 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2048 d.addCallback(_check_target)
# Fixture builder: returns (newkids, filecap1, filecap2, filecap3, dircap) —
# a children map mixing an immutable file (with metadata), a mutable file,
# a read-only mutable file, and a mutable dirnode.
# NOTE(review): the dict literal's closing line (elided original 2064) is
# missing from this listing.
2051 def _create_initial_children(self):
2052 contents, n, filecap1 = self.makefile(12)
2053 md1 = {"metakey1": "metavalue1"}
2054 filecap2 = make_mutable_file_uri()
2055 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2056 filecap3 = node3.get_readonly_uri()
2057 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2058 dircap = DirectoryNode(node4, None, None).get_uri()
2059 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2060 "metadata": md1, }],
2061 u"child-mutable": ["filenode", {"rw_uri": filecap2}],
2062 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2063 u"dirchild": ["dirnode", {"rw_uri": dircap}],
2065 return newkids, filecap1, filecap2, filecap3, dircap
# Fixture builder: returns (newkids, filecap1, immdircap) with only immutable
# children (a CHK file and an immutable directory).
# NOTE(review): the dict literal's closing line (elided original 2077) is
# missing from this listing.
2067 def _create_immutable_children(self):
2068 contents, n, filecap1 = self.makefile(12)
2069 md1 = {"metakey1": "metavalue1"}
2070 tnode = create_chk_filenode("immutable directory contents\n"*10)
2071 dnode = DirectoryNode(tnode, None, None)
2072 assert not dnode.is_mutable()
2073 immdircap = dnode.get_uri()
2074 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2075 "metadata": md1, }],
2076 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2078 return newkids, filecap1, immdircap
# POST /uri?t=mkdir-with-children: response is a new URI:DIR cap; the node
# built from it must contain exactly the posted children with matching caps.
# NOTE(review): the filecap2/filecap3 arguments (elided originals 2092, 2095),
# the "return d2" (2098), and the trailing "return d" are not shown.
2080 def test_POST_mkdir_no_parentdir_initial_children(self):
2081 (newkids, filecap1, filecap2, filecap3,
2082 dircap) = self._create_initial_children()
2083 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2084 def _after_mkdir(res):
2085 self.failUnless(res.startswith("URI:DIR"), res)
2086 n = self.s.create_node_from_uri(res)
2087 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2088 d2.addCallback(lambda ign:
2089 self.failUnlessChildURIIs(n, u"child-imm", filecap1))
2090 d2.addCallback(lambda ign:
2091 self.failUnlessChildURIIs(n, u"child-mutable",
2093 d2.addCallback(lambda ign:
2094 self.failUnlessChildURIIs(n, u"child-mutable-ro",
2096 d2.addCallback(lambda ign:
2097 self.failUnlessChildURIIs(n, u"dirchild", dircap))
2099 d.addCallback(_after_mkdir)
# Plain t=mkdir must reject a children= body and point the caller at
# t=mkdir-with-children.
# NOTE(review): the expected status code argument (elided original 2108) and
# "return d" are not shown.
2102 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2103 # the regular /uri?t=mkdir operation is specified to ignore its body.
2104 # Only t=mkdir-with-children pays attention to it.
2105 (newkids, filecap1, filecap2, filecap3,
2106 dircap) = self._create_initial_children()
2107 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2109 "t=mkdir does not accept children=, "
2110 "try t=mkdir-with-children instead",
2111 self.POST2, "/uri?t=mkdir", # without children
2112 simplejson.dumps(newkids))
# POST /uri?t=bogus: 400 Bad Request listing the operations /uri does accept.
# NOTE(review): trailing "return d" (elided original 2120) not shown.
2115 def test_POST_noparent_bad(self):
2116 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2117 "/uri accepts only PUT, PUT?t=mkdir, "
2118 "POST?t=upload, and POST?t=mkdir",
2119 self.POST, "/uri?t=bogus")
# POST /uri?t=mkdir-immutable with immutable children: result is a URI:DIR cap
# whose node holds exactly those children.
# NOTE(review): the immdircap argument continuation (elided originals
# 2133-2134) and the trailing "return d" are not shown.
2122 def test_POST_mkdir_no_parentdir_immutable(self):
2123 (newkids, filecap1, immdircap) = self._create_immutable_children()
2124 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2125 def _after_mkdir(res):
2126 self.failUnless(res.startswith("URI:DIR"), res)
2127 n = self.s.create_node_from_uri(res)
2128 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2129 d2.addCallback(lambda ign:
2130 self.failUnlessChildURIIs(n, u"child-imm", filecap1))
2131 d2.addCallback(lambda ign:
2132 self.failUnlessChildURIIs(n, u"dirchild-imm",
2135 d.addCallback(_after_mkdir)
# /uri?t=mkdir-immutable with mutable children must be rejected with the
# "not itself immutable" error.
# NOTE(review): the expected status and self.POST2 callable (elided originals
# 2143, 2145) and "return d" are not shown.
2138 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2139 (newkids, filecap1, filecap2, filecap3,
2140 dircap) = self._create_initial_children()
2141 d = self.shouldFail2(error.Error,
2142 "test_POST_mkdir_no_parentdir_immutable_bad",
2144 "a mkdir-immutable operation was given a child that was not itself immutable",
2146 "/uri?t=mkdir-immutable",
2147 simplejson.dumps(newkids))
# Scrape the welcome page's "Create a directory" form with a regex, replay
# the extracted action/t/name/value as a POST, and expect a 303 redirect.
# NOTE(review): the initial GET that binds d (elided original 2152), the
# "formt = mo.group(2)" line (2157), and "return d" are not shown.
2150 def test_welcome_page_mkdir_button(self):
2151 # Fetch the welcome page.
2153 def _after_get_welcome_page(res):
2154 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
2155 mo = MKDIR_BUTTON_RE.search(res)
2156 formaction = mo.group(1)
2158 formaname = mo.group(3)
2159 formavalue = mo.group(4)
2160 return (formaction, formt, formaname, formavalue)
2161 d.addCallback(_after_get_welcome_page)
2162 def _after_parse_form(res):
2163 (formaction, formt, formaname, formavalue) = res
2164 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2165 d.addCallback(_after_parse_form)
2166 d.addBoth(self.shouldRedirect, None, statuscode='303')
# mkdir over an existing child "sub" (default replace=true): the child ends up
# an empty directory.
# NOTE(review): trailing "return d" (elided original 2173) not shown.
2169 def test_POST_mkdir_replace(self): # return value?
2170 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2171 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2172 d.addCallback(self.failUnlessNodeKeysAre, [])
# mkdir with ?replace=false over existing "sub" must fail and leave the
# original child (containing baz.txt) untouched.
# NOTE(review): the expected "409 Conflict" line (elided original 2179) and
# "return d" are not shown.
2175 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2176 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2177 d.addBoth(self.shouldFail, error.Error,
2178 "POST_mkdir_no_replace_queryarg",
2180 "There was already a child by that name, and you asked me "
2181 "to not replace it")
2182 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2183 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Same as the queryarg variant but passing replace=false as a form field
# (elided original 2188); existing "sub" must survive with baz.txt.
# NOTE(review): expected status line (2190) and "return d" also elided.
2186 def test_POST_mkdir_no_replace_field(self): # return value?
2187 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2189 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2191 "There was already a child by that name, and you asked me "
2192 "to not replace it")
2193 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2194 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir with when_done=/THERE as a form field: redirect to /THERE and create
# the empty newdir.
# NOTE(review): trailing "return d" (elided original 2203) not shown.
2197 def test_POST_mkdir_whendone_field(self):
2198 d = self.POST(self.public_url + "/foo",
2199 t="mkdir", name="newdir", when_done="/THERE")
2200 d.addBoth(self.shouldRedirect, "/THERE")
2201 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2202 d.addCallback(self.failUnlessNodeKeysAre, [])
# Same as above but when_done passed as a query argument.
# NOTE(review): trailing "return d" (elided original 2211) not shown.
2205 def test_POST_mkdir_whendone_queryarg(self):
2206 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2207 t="mkdir", name="newdir")
2208 d.addBoth(self.shouldRedirect, "/THERE")
2209 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2210 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST t=BOGUS to a directory: 400 Bad Request.
# NOTE(review): trailing "return d" (elided original 2217) not shown.
2213 def test_POST_bad_t(self):
2214 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2215 "POST to a directory with bad t=BOGUS",
2216 self.POST, self.public_url + "/foo", t="BOGUS")
# t=set_children: POST a raw JSON body adding three filenode children
# atomically, then verify each URI landed under the right name.
# NOTE(review): this listing is heavily elided — the JSON body's opening
# brace/metadata framing lines (e.g. originals 2223-2224, 2226-2227,
# 2230-2231, ...), the "def _then(res):" header (2251), and the final
# "return d" are missing; visible lines kept verbatim.
2219 def test_POST_set_children(self):
2220 contents9, n9, newuri9 = self.makefile(9)
2221 contents10, n10, newuri10 = self.makefile(10)
2222 contents11, n11, newuri11 = self.makefile(11)
2225 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2228 "ctime": 1002777696.7564139,
2229 "mtime": 1002777696.7564139
2232 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2235 "ctime": 1002777696.7564139,
2236 "mtime": 1002777696.7564139
2239 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2242 "ctime": 1002777696.7564139,
2243 "mtime": 1002777696.7564139
2246 }""" % (newuri9, newuri10, newuri11)
2248 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
2250 d = client.getPage(url, method="POST", postdata=reqbody)
2252 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
2253 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
2254 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
2256 d.addCallback(_then)
2257 d.addErrback(self.dump_error)
# t=uri: attach an existing filecap as child new.txt; verify cap and contents.
# NOTE(review): the contents argument continuation (elided original 2266) and
# "return d" are not shown.
2260 def test_POST_put_uri(self):
2261 contents, n, newuri = self.makefile(8)
2262 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2263 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2264 d.addCallback(lambda res:
2265 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# t=uri over the existing bar.txt (default replace): child is overwritten.
# NOTE(review): contents argument continuation (elided original 2275) and
# "return d" are not shown.
2269 def test_POST_put_uri_replace(self):
2270 contents, n, newuri = self.makefile(8)
2271 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2272 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2273 d.addCallback(lambda res:
2274 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# t=uri with ?replace=false over existing bar.txt must fail; the original
# bar.txt contents must still be served.
# NOTE(review): expected "409 Conflict" line (elided original 2284) and
# "return d" are not shown.
2278 def test_POST_put_uri_no_replace_queryarg(self):
2279 contents, n, newuri = self.makefile(8)
2280 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2281 name="bar.txt", uri=newuri)
2282 d.addBoth(self.shouldFail, error.Error,
2283 "POST_put_uri_no_replace_queryarg",
2285 "There was already a child by that name, and you asked me "
2286 "to not replace it")
2287 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2288 d.addCallback(self.failUnlessIsBarDotTxt)
# Same as the queryarg variant but replace=false as a form field.
# NOTE(review): expected status line (elided original 2297) and "return d"
# are not shown.
2291 def test_POST_put_uri_no_replace_field(self):
2292 contents, n, newuri = self.makefile(8)
2293 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2294 name="bar.txt", uri=newuri)
2295 d.addBoth(self.shouldFail, error.Error,
2296 "POST_put_uri_no_replace_field",
2298 "There was already a child by that name, and you asked me "
2299 "to not replace it")
2300 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2301 d.addCallback(self.failUnlessIsBarDotTxt)
# t=delete removes bar.txt from /foo's child list.
# NOTE(review): trailing "return d" (elided original 2310) not shown.
2304 def test_POST_delete(self):
2305 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2306 d.addCallback(lambda res: self._foo_node.list())
2307 def _check(children):
2308 self.failIf(u"bar.txt" in children)
2309 d.addCallback(_check)
# t=rename bar.txt -> wibble.txt: old name gone, new name serves the same
# bytes and the same JSON representation.
# NOTE(review): trailing "return d" (elided original 2323) not shown.
2312 def test_POST_rename_file(self):
2313 d = self.POST(self.public_url + "/foo", t="rename",
2314 from_name="bar.txt", to_name='wibble.txt')
2315 d.addCallback(lambda res:
2316 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2317 d.addCallback(lambda res:
2318 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2319 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2320 d.addCallback(self.failUnlessIsBarDotTxt)
2321 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2322 d.addCallback(self.failUnlessIsBarJSON)
# Renaming bar.txt onto itself is a no-op: child still present and unchanged.
# NOTE(review): trailing "return d" (elided original 2334) not shown.
2325 def test_POST_rename_file_redundant(self):
2326 d = self.POST(self.public_url + "/foo", t="rename",
2327 from_name="bar.txt", to_name='bar.txt')
2328 d.addCallback(lambda res:
2329 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2330 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2331 d.addCallback(self.failUnlessIsBarDotTxt)
2332 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2333 d.addCallback(self.failUnlessIsBarJSON)
# Rename bar.txt onto the existing "empty" child (default replace=true):
# "empty" now serves bar.txt's contents.
# NOTE(review): trailing "return d" (elided original 2348) not shown.
2336 def test_POST_rename_file_replace(self):
2337 # rename a file and replace a directory with it
2338 d = self.POST(self.public_url + "/foo", t="rename",
2339 from_name="bar.txt", to_name='empty')
2340 d.addCallback(lambda res:
2341 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2342 d.addCallback(lambda res:
2343 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2344 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2345 d.addCallback(self.failUnlessIsBarDotTxt)
2346 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2347 d.addCallback(self.failUnlessIsBarJSON)
# Rename with ?replace=false onto existing "empty" must fail; "empty" keeps
# its empty-directory JSON.
# NOTE(review): expected "409 Conflict" line (elided original 2356) and
# "return d" are not shown.
2350 def test_POST_rename_file_no_replace_queryarg(self):
2351 # rename a file and replace a directory with it
2352 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2353 from_name="bar.txt", to_name='empty')
2354 d.addBoth(self.shouldFail, error.Error,
2355 "POST_rename_file_no_replace_queryarg",
2357 "There was already a child by that name, and you asked me "
2358 "to not replace it")
2359 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2360 d.addCallback(self.failUnlessIsEmptyJSON)
# Same as the queryarg variant but replace=false as a form field.
# NOTE(review): expected status line (elided original 2369) and "return d"
# are not shown.
2363 def test_POST_rename_file_no_replace_field(self):
2364 # rename a file and replace a directory with it
2365 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2366 from_name="bar.txt", to_name='empty')
2367 d.addBoth(self.shouldFail, error.Error,
2368 "POST_rename_file_no_replace_field",
2370 "There was already a child by that name, and you asked me "
2371 "to not replace it")
2372 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2373 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json rendering of an empty directory.

    The decoded value must be a ["dirnode", {...}] pair whose "children"
    map is empty.
    """
    decoded = simplejson.loads(res)
    self.failUnlessEqual(decoded[0], "dirnode", decoded)
    children = decoded[1]["children"]
    self.failUnlessEqual(len(children), 0)
# to_name containing a slash must be rejected; bar.txt stays in place.
# NOTE(review): expected status line (elided original 2386), the failing
# callable (2388), and "return d" are not shown.
2381 def test_POST_rename_file_slash_fail(self):
2382 d = self.POST(self.public_url + "/foo", t="rename",
2383 from_name="bar.txt", to_name='kirk/spock.txt')
2384 d.addBoth(self.shouldFail, error.Error,
2385 "test_POST_rename_file_slash_fail",
2387 "to_name= may not contain a slash",
2389 d.addCallback(lambda res:
2390 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# Rename the directory child foo -> plunk at the public root; the renamed
# node still serves foo's JSON.
# NOTE(review): trailing "return d" (elided original 2402) not shown.
2393 def test_POST_rename_dir(self):
2394 d = self.POST(self.public_url, t="rename",
2395 from_name="foo", to_name='plunk')
2396 d.addCallback(lambda res:
2397 self.failIfNodeHasChild(self.public_root, u"foo"))
2398 d.addCallback(lambda res:
2399 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2400 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2401 d.addCallback(self.failUnlessIsFooJSON)
2404 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2405 """ If target is not None then the redirection has to go to target. If
2406 statuscode is not None then the redirection has to be accomplished with
2407 that HTTP status code."""
2408 if not isinstance(res, failure.Failure):
2409 to_where = (target is None) and "somewhere" or ("to " + target)
2410 self.fail("%s: we were expecting to get redirected %s, not get an"
2411 " actual page: %s" % (which, to_where, res))
2412 res.trap(error.PageRedirect)
2413 if statuscode is not None:
2414 self.failUnlessEqual(res.value.status, statuscode,
2415 "%s: not a redirect" % which)
2416 if target is not None:
2417 # the PageRedirect does not seem to capture the uri= query arg
2418 # properly, so we can't check for it.
2419 realtarget = self.webish_url + target
2420 self.failUnlessEqual(res.value.location, realtarget,
2421 "%s: wrong target" % which)
2422 return res.value.location
# GET /uri?uri=<cap> must redirect to /uri/<quoted-cap>, preserving
# filename= and t= query args; with followRedirect the file and the foo
# directory JSON are served.
# NOTE(review): the initial "d = self.GET(base)" (elided original 2428) and
# trailing lines (2442+) are not shown in this listing.
2424 def test_GET_URI_form(self):
2425 base = "/uri?uri=%s" % self._bar_txt_uri
2426 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2427 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2429 d.addBoth(self.shouldRedirect, targetbase)
2430 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2431 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2432 d.addCallback(lambda res: self.GET(base+"&t=json"))
2433 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2434 d.addCallback(self.log, "about to get file by uri")
2435 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2436 d.addCallback(self.failUnlessIsBarDotTxt)
2437 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2438 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2439 followRedirect=True))
2440 d.addCallback(self.failUnlessIsFooJSON)
2441 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= argument: 400 Bad Request.
# NOTE(review): the failing callable line (elided original 2448) and
# "return d" are not shown.
2445 def test_GET_URI_form_bad(self):
2446 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2447 "400 Bad Request", "GET /uri requires uri=",
# t=rename-form: the rendered form carries when_done="." and a from_name
# field pre-filled with bar.txt.
# NOTE(review): the "def _check(res):" header (elided original 2454) and
# "return d" are not shown.
2451 def test_GET_rename_form(self):
2452 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2453 followRedirect=True)
2455 self.failUnless('name="when_done" value="."' in res, res)
2456 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2457 d.addCallback(_check)
# Pass-through logging hook for callback chains; the debug print is left
# commented out. NOTE(review): the body's remaining lines (elided originals
# 2462-2463, presumably the log call and "return res") are not shown.
2460 def log(self, res, msg):
2461 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/<filecap> serves the file, with or without filename= / save=true.
# NOTE(review): the initial "d = self.GET(base)" (elided original 2467) and
# "return d" are not shown.
2465 def test_GET_URI_URL(self):
2466 base = "/uri/%s" % self._bar_txt_uri
2468 d.addCallback(self.failUnlessIsBarDotTxt)
2469 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2470 d.addCallback(self.failUnlessIsBarDotTxt)
2471 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2472 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/<dircap>?t=json serves the directory's JSON.
# NOTE(review): the GET call binding d (elided original 2477) and "return d"
# are not shown.
2475 def test_GET_URI_URL_dir(self):
2476 base = "/uri/%s?t=json" % self._foo_uri
2478 d.addCallback(self.failUnlessIsFooJSON)
# GET of a cap with no recoverable shares: expect http.GONE with a
# NotEnoughSharesError body.
# NOTE(review): the failing callable (elided original 2485) and "return d"
# are not shown.
2481 def test_GET_URI_URL_missing(self):
2482 base = "/uri/%s" % self._bad_file_uri
2483 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2484 http.GONE, None, "NotEnoughSharesError",
2486 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2487 # here? we must arrange for a download to fail after target.open()
2488 # has been called, and then inspect the response to see that it is
2489 # shorter than we expected.
# PUT ?t=uri to /foo: replace the directory child with a freshly created
# (empty) dirnode; response echoes the new URI.
# NOTE(review): the "def _made_dir(dn):" header (elided original 2494), the
# child-name/URI continuation (2502-2504), and trailing lines are not shown.
2492 def test_PUT_DIRURL_uri(self):
2493 d = self.s.create_dirnode()
2495 new_uri = dn.get_uri()
2496 # replace /foo with a new (empty) directory
2497 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2498 d.addCallback(lambda res:
2499 self.failUnlessEqual(res.strip(), new_uri))
2500 d.addCallback(lambda res:
2501 self.failUnlessChildURIIs(self.public_root,
2505 d.addCallback(_made_dir)
# PUT ?t=uri&replace=false over existing /foo must 409; /foo keeps its
# original URI.
# NOTE(review): the "def _made_dir(dn):" header (elided original 2510), the
# self.PUT callable line (2516), argument continuations (2518, 2521-2523),
# and trailing lines are not shown.
2508 def test_PUT_DIRURL_uri_noreplace(self):
2509 d = self.s.create_dirnode()
2511 new_uri = dn.get_uri()
2512 # replace /foo with a new (empty) directory, but ask that
2513 # replace=false, so it should fail
2514 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2515 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2517 self.public_url + "/foo?t=uri&replace=false",
2519 d.addCallback(lambda res:
2520 self.failUnlessChildURIIs(self.public_root,
2524 d.addCallback(_made_dir)
# PUT ?t=BOGUS to a directory: 400 Bad Request; /foo is left untouched.
# NOTE(review): the child-name/URI argument continuation (elided originals
# 2533-2534) and "return d" are not shown.
2527 def test_PUT_DIRURL_bad_t(self):
2528 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2529 "400 Bad Request", "PUT to a directory",
2530 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2531 d.addCallback(lambda res:
2532 self.failUnlessChildURIIs(self.public_root,
# PUT ?t=uri to a new child name: attaches the cap; response echoes it.
# NOTE(review): the contents argument continuation (elided original 2543)
# and "return d" are not shown.
2537 def test_PUT_NEWFILEURL_uri(self):
2538 contents, n, new_uri = self.makefile(8)
2539 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2540 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2541 d.addCallback(lambda res:
2542 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# PUT ?t=uri over existing bar.txt (default replace): child is overwritten.
# NOTE(review): contents argument continuation (elided original 2552) and
# "return d" are not shown.
2546 def test_PUT_NEWFILEURL_uri_replace(self):
2547 contents, n, new_uri = self.makefile(8)
2548 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2549 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2550 d.addCallback(lambda res:
2551 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# PUT ?t=uri&replace=false over existing bar.txt must fail with the
# already-a-child error.
# NOTE(review): expected status line (elided original 2559) and "return d"
# are not shown.
2555 def test_PUT_NEWFILEURL_uri_no_replace(self):
2556 contents, n, new_uri = self.makefile(8)
2557 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2558 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2560 "There was already a child by that name, and you asked me "
2561 "to not replace it")
# PUT /uri with a body: returns a cap registered in FakeCHKFileNode storage;
# GET /uri/<cap> returns the same bytes.
# NOTE(review): the "def _check(uri):" header (elided original 2567), the
# expected-contents continuation (2571), "def _check2(res):" (2574), and
# "return d" are not shown.
2564 def test_PUT_NEWFILE_URI(self):
2565 file_contents = "New file contents here\n"
2566 d = self.PUT("/uri", file_contents)
2568 assert isinstance(uri, str), uri
2569 self.failUnless(uri in FakeCHKFileNode.all_contents)
2570 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2572 return self.GET("/uri/%s" % uri)
2573 d.addCallback(_check)
2575 self.failUnlessEqual(res, file_contents)
2576 d.addCallback(_check2)
# Same as test_PUT_NEWFILE_URI but with an explicit ?mutable=false.
# NOTE(review): same elisions as above (the _check/_check2 headers, the
# expected-contents continuation, and "return d" are not shown).
2579 def test_PUT_NEWFILE_URI_not_mutable(self):
2580 file_contents = "New file contents here\n"
2581 d = self.PUT("/uri?mutable=false", file_contents)
2583 assert isinstance(uri, str), uri
2584 self.failUnless(uri in FakeCHKFileNode.all_contents)
2585 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2587 return self.GET("/uri/%s" % uri)
2588 d.addCallback(_check)
2590 self.failUnlessEqual(res, file_contents)
2591 d.addCallback(_check2)
# PUT /uri?t=bogus must fail, listing the operations /uri accepts.
# NOTE(review): the expected status line (elided original 2598) and
# "return d" are not shown.
2594 def test_PUT_NEWFILE_URI_only_PUT(self):
2595 d = self.PUT("/uri?t=bogus", "")
2596 d.addBoth(self.shouldFail, error.Error,
2597 "PUT_NEWFILE_URI_only_PUT",
2599 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true: returns a URI:SSK: write cap whose storage index is
# registered in FakeMutableFileNode; downloading the best version and GETting
# /uri/<cap> both return the original contents.
# NOTE(review): the _check2/_check3 "def" headers (elided originals 2614,
# 2618) and "return d" are not shown.
2602 def test_PUT_NEWFILE_URI_mutable(self):
2603 file_contents = "New file contents here\n"
2604 d = self.PUT("/uri?mutable=true", file_contents)
2605 def _check1(filecap):
2606 filecap = filecap.strip()
2607 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2608 self.filecap = filecap
2609 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2610 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2611 n = self.s.create_node_from_uri(filecap)
2612 return n.download_best_version()
2613 d.addCallback(_check1)
2615 self.failUnlessEqual(data, file_contents)
2616 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2617 d.addCallback(_check2)
2619 self.failUnlessEqual(res, file_contents)
2620 d.addCallback(_check3)
# PUT /uri?t=mkdir: response is a dircap for an empty directory; its t=json
# rendering is the empty-dir JSON.
# NOTE(review): the "def _check(uri):" header (elided original 2625), the
# "return d2" (2630), and the trailing "return d" are not shown.
2623 def test_PUT_mkdir(self):
2624 d = self.PUT("/uri?t=mkdir", "")
2626 n = self.s.create_node_from_uri(uri.strip())
2627 d2 = self.failUnlessNodeKeysAre(n, [])
2628 d2.addCallback(lambda res:
2629 self.GET("/uri/%s?t=json" % uri))
2631 d.addCallback(_check)
2632 d.addCallback(self.failUnlessIsEmptyJSON)
# t=check on a child file: just exercises the endpoint (fake filenodes give
# trivial results, per the in-code TODO).
# NOTE(review): the "def _done(res):" header (elided original 2637), the rest
# of the TODO comment (2642-2643), and "return d" are not shown.
2635 def test_POST_check(self):
2636 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2638 # this returns a string form of the results, which are probably
2639 # None since we're using fake filenodes.
2640 # TODO: verify that the check actually happened, by changing
2641 # FakeCHKFileNode to count how many times .check() has been
2644 d.addCallback(_done)
# An unknown HTTP method (BOGUS) gets 501 Not Implemented.
# NOTE(review): trailing "return d" (elided original 2653) not shown.
2647 def test_bad_method(self):
2648 url = self.webish_url + self.public_url + "/foo/bar.txt"
2649 d = self.shouldHTTPError("test_bad_method",
2650 501, "Not Implemented",
2651 "I don't know how to treat a BOGUS request.",
2652 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri resource gets 501 Not Implemented.
# NOTE(review): trailing "return d" (elided original 2660) not shown.
2655 def test_short_url(self):
2656 url = self.webish_url + "/uri"
2657 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2658 "I don't know how to treat a DELETE request.",
2659 client.getPage, url, method="DELETE")
# Status query for an unknown ophandle gets a 404.
# NOTE(review): trailing "return d" (elided original 2667) not shown.
2662 def test_ophandle_bad(self):
2663 url = self.webish_url + "/operations/bogus?t=status"
2664 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2665 "unknown/expired handle 'bogus'",
2666 client.getPage, url)
# Start a manifest under ophandle 128, then t=cancel it: the monitor reports
# is_cancelled(), t=cancel forgets the handle, and a later status query 404s.
# NOTE(review): the "def _check1(res):" / "def _check2(res):" headers, the
# "return d" inside _check1 (2685), the self.GET callable (2691), and the
# final "return d" are among the elided lines.
2669 def test_ophandle_cancel(self):
2670 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2671 followRedirect=True)
2672 d.addCallback(lambda ignored:
2673 self.GET("/operations/128?t=status&output=JSON"))
2675 data = simplejson.loads(res)
2676 self.failUnless("finished" in data, res)
2677 monitor = self.ws.root.child_operations.handles["128"][0]
2678 d = self.POST("/operations/128?t=cancel&output=JSON")
2680 data = simplejson.loads(res)
2681 self.failUnless("finished" in data, res)
2682 # t=cancel causes the handle to be forgotten
2683 self.failUnless(monitor.is_cancelled())
2684 d.addCallback(_check2)
2686 d.addCallback(_check1)
2687 d.addCallback(lambda ignored:
2688 self.shouldHTTPError("test_ophandle_cancel",
2689 404, "404 Not Found",
2690 "unknown/expired handle '128'",
2692 "/operations/128?t=status&output=JSON"))
# Start a manifest with retain-for=60, then query with retain-for=0 so the
# handle expires; after a 2s stall the handle must be gone (404).
# NOTE(review): the "def _check1(res):" header (elided original 2700), the
# self.GET callable (2710), and "return d" are not shown.
2695 def test_ophandle_retainfor(self):
2696 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2697 followRedirect=True)
2698 d.addCallback(lambda ignored:
2699 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2701 data = simplejson.loads(res)
2702 self.failUnless("finished" in data, res)
2703 d.addCallback(_check1)
2704 # the retain-for=0 will cause the handle to be expired very soon
2705 d.addCallback(self.stall, 2.0)
2706 d.addCallback(lambda ignored:
2707 self.shouldHTTPError("test_ophandle_retainfor",
2708 404, "404 Not Found",
2709 "unknown/expired handle '129'",
2711 "/operations/129?t=status&output=JSON"))
# Start a manifest under ophandle 130, wait for completion, then query with
# release-after-complete=true: the handle is released and a re-query 404s.
# NOTE(review): the self.GET callable (elided original 2725) and "return d"
# are not shown.
2714 def test_ophandle_release_after_complete(self):
2715 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2716 followRedirect=True)
2717 d.addCallback(self.wait_for_operation, "130")
2718 d.addCallback(lambda ignored:
2719 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2720 # the release-after-complete=true will cause the handle to be expired
2721 d.addCallback(lambda ignored:
2722 self.shouldHTTPError("test_ophandle_release_after_complete",
2723 404, "404 Not Found",
2724 "unknown/expired handle '130'",
2726 "/operations/130?t=status&output=JSON"))
# POST /report_incident with details=: the response thanks the reporter.
# NOTE(review): the "def _done(res):" header (elided original 2731) and
# "return d" are not shown.
2729 def test_incident(self):
2730 d = self.POST("/report_incident", details="eek")
2732 self.failUnless("Thank you for your report!" in res, res)
2733 d.addCallback(_done)
# Files dropped under staticdir are served beneath /static/.
# NOTE(review): the f.write/f.close lines (elided originals 2740-2742), the
# "def _check(res):" header (2744), and "return d" are not shown.
2736 def test_static(self):
2737 webdir = os.path.join(self.staticdir, "subdir")
2738 fileutil.make_dirs(webdir)
2739 f = open(os.path.join(webdir, "hello.txt"), "wb")
2743 d = self.GET("/static/subdir/hello.txt")
2745 self.failUnlessEqual(res, "hello")
2746 d.addCallback(_check)
# Unit tests for the pure helper functions in allmydata.web.common and
# allmydata.web.status: parse_replace_arg, abbreviate_time/rate/size, plural.
# NOTE(review): numbered listing with elided lines — the expected value for
# parse_replace_arg("only-files") (original 2755) and the "def convert(s):" /
# "def convert2(s):" headers (2780, 2785) are missing; code kept verbatim.
2750 class Util(unittest.TestCase, ShouldFailMixin):
2751 def test_parse_replace_arg(self):
2752 self.failUnlessEqual(common.parse_replace_arg("true"), True)
2753 self.failUnlessEqual(common.parse_replace_arg("false"), False)
2754 self.failUnlessEqual(common.parse_replace_arg("only-files"),
2756 self.shouldFail(AssertionError, "test_parse_replace_arg", "",
2757 common.parse_replace_arg, "only_fles")
2759 def test_abbreviate_time(self):
2760 self.failUnlessEqual(common.abbreviate_time(None), "")
2761 self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
2762 self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
2763 self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
2764 self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
2766 def test_abbreviate_rate(self):
2767 self.failUnlessEqual(common.abbreviate_rate(None), "")
2768 self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
2769 self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
2770 self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
2772 def test_abbreviate_size(self):
2773 self.failUnlessEqual(common.abbreviate_size(None), "")
2774 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
2775 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
2776 self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
2777 self.failUnlessEqual(common.abbreviate_size(123), "123B")
2779 def test_plural(self):
2781 return "%d second%s" % (s, status.plural(s))
2782 self.failUnlessEqual(convert(0), "0 seconds")
2783 self.failUnlessEqual(convert(1), "1 second")
2784 self.failUnlessEqual(convert(2), "2 seconds")
2786 return "has share%s: %s" % (status.plural(s), ",".join(s))
2787 self.failUnlessEqual(convert2([]), "has shares: ")
2788 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2789 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2792 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
2794 def CHECK(self, ign, which, args, clientnum=0):
2795 fileurl = self.fileurls[which]
2796 url = fileurl + "?" + args
2797 return self.GET(url, method="POST", clientnum=clientnum)
# Exercise t=check (HTML and JSON output) against files in several states of
# health: a fully-healthy CHK file, a literal file, a small immutable dirnode,
# a file with one missing share ("sick"), a file with only one share left
# ("dead"), and a mutable file with a deliberately corrupted share.
2799 def test_filecheck(self):
2800 self.basedir = "web/Grid/filecheck"
# NOTE(review): the listing elides set-up lines here (grid setup, DATA,
# self.uris/self.fileurls initialization) — confirm against the full file.
2802 c0 = self.g.clients[0]
2805 d = c0.upload(upload.Data(DATA, convergence=""))
2806 def _stash_uri(ur, which):
2807 self.uris[which] = ur.uri
2808 d.addCallback(_stash_uri, "good")
2809 d.addCallback(lambda ign:
2810 c0.upload(upload.Data(DATA+"1", convergence="")))
2811 d.addCallback(_stash_uri, "sick")
2812 d.addCallback(lambda ign:
2813 c0.upload(upload.Data(DATA+"2", convergence="")))
2814 d.addCallback(_stash_uri, "dead")
2815 def _stash_mutable_uri(n, which):
2816 self.uris[which] = n.get_uri()
2817 assert isinstance(self.uris[which], str)
2818 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2819 d.addCallback(_stash_mutable_uri, "corrupt")
2820 d.addCallback(lambda ign:
2821 c0.upload(upload.Data("literal", convergence="")))
2822 d.addCallback(_stash_uri, "small")
2823 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
2824 d.addCallback(_stash_mutable_uri, "smalldir")
# Build a web URL for each stashed cap; caps may contain characters that
# need percent-encoding.
2826 def _compute_fileurls(ignored):
2828 for which in self.uris:
2829 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2830 d.addCallback(_compute_fileurls)
# Damage the stored shares to produce the desired health states:
# delete 1 of 10 shares for "sick", 9 of 10 for "dead", and corrupt one
# share of the mutable "corrupt" file via the debug CLI.
2832 def _clobber_shares(ignored):
2833 good_shares = self.find_shares(self.uris["good"])
2834 self.failUnlessEqual(len(good_shares), 10)
2835 sick_shares = self.find_shares(self.uris["sick"])
2836 os.unlink(sick_shares[0][2])
2837 dead_shares = self.find_shares(self.uris["dead"])
2838 for i in range(1, 10):
2839 os.unlink(dead_shares[i][2])
2840 c_shares = self.find_shares(self.uris["corrupt"])
2841 cso = CorruptShareOptions()
2842 cso.stdout = StringIO()
2843 cso.parseOptions([c_shares[0][2]])
# NOTE(review): the `corrupt_share(cso)` call appears to be on an elided line.
2845 d.addCallback(_clobber_shares)
# --- healthy file: HTML and JSON check results ---
2847 d.addCallback(self.CHECK, "good", "t=check")
2848 def _got_html_good(res):
2849 self.failUnless("Healthy" in res, res)
2850 self.failIf("Not Healthy" in res, res)
2851 d.addCallback(_got_html_good)
2852 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2853 def _got_html_good_return_to(res):
2854 self.failUnless("Healthy" in res, res)
2855 self.failIf("Not Healthy" in res, res)
2856 self.failUnless('<a href="somewhere">Return to file'
2858 d.addCallback(_got_html_good_return_to)
2859 d.addCallback(self.CHECK, "good", "t=check&output=json")
2860 def _got_json_good(res):
2861 r = simplejson.loads(res)
2862 self.failUnlessEqual(r["summary"], "Healthy")
2863 self.failUnless(r["results"]["healthy"])
2864 self.failIf(r["results"]["needs-rebalancing"])
2865 self.failUnless(r["results"]["recoverable"])
2866 d.addCallback(_got_json_good)
# --- literal (LIT) file: always healthy, no storage index ---
2868 d.addCallback(self.CHECK, "small", "t=check")
2869 def _got_html_small(res):
2870 self.failUnless("Literal files are always healthy" in res, res)
2871 self.failIf("Not Healthy" in res, res)
2872 d.addCallback(_got_html_small)
2873 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2874 def _got_html_small_return_to(res):
2875 self.failUnless("Literal files are always healthy" in res, res)
2876 self.failIf("Not Healthy" in res, res)
2877 self.failUnless('<a href="somewhere">Return to file'
2879 d.addCallback(_got_html_small_return_to)
2880 d.addCallback(self.CHECK, "small", "t=check&output=json")
2881 def _got_json_small(res):
2882 r = simplejson.loads(res)
2883 self.failUnlessEqual(r["storage-index"], "")
2884 self.failUnless(r["results"]["healthy"])
2885 d.addCallback(_got_json_small)
# --- small immutable directory: treated like a literal file ---
2887 d.addCallback(self.CHECK, "smalldir", "t=check")
2888 def _got_html_smalldir(res):
2889 self.failUnless("Literal files are always healthy" in res, res)
2890 self.failIf("Not Healthy" in res, res)
2891 d.addCallback(_got_html_smalldir)
2892 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
2893 def _got_json_smalldir(res):
2894 r = simplejson.loads(res)
2895 self.failUnlessEqual(r["storage-index"], "")
2896 self.failUnless(r["results"]["healthy"])
2897 d.addCallback(_got_json_smalldir)
# --- sick file (9 of 10 shares): unhealthy but recoverable ---
2899 d.addCallback(self.CHECK, "sick", "t=check")
2900 def _got_html_sick(res):
2901 self.failUnless("Not Healthy" in res, res)
2902 d.addCallback(_got_html_sick)
2903 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2904 def _got_json_sick(res):
2905 r = simplejson.loads(res)
2906 self.failUnlessEqual(r["summary"],
2907 "Not Healthy: 9 shares (enc 3-of-10)")
2908 self.failIf(r["results"]["healthy"])
2909 self.failIf(r["results"]["needs-rebalancing"])
2910 self.failUnless(r["results"]["recoverable"])
2911 d.addCallback(_got_json_sick)
# --- dead file (1 of 10 shares, need 3): unrecoverable ---
2913 d.addCallback(self.CHECK, "dead", "t=check")
2914 def _got_html_dead(res):
2915 self.failUnless("Not Healthy" in res, res)
2916 d.addCallback(_got_html_dead)
2917 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2918 def _got_json_dead(res):
2919 r = simplejson.loads(res)
2920 self.failUnlessEqual(r["summary"],
2921 "Not Healthy: 1 shares (enc 3-of-10)")
2922 self.failIf(r["results"]["healthy"])
2923 self.failIf(r["results"]["needs-rebalancing"])
2924 self.failIf(r["results"]["recoverable"])
2925 d.addCallback(_got_json_dead)
# --- corrupt mutable file: verify=true must detect the bad share ---
2927 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2928 def _got_html_corrupt(res):
2929 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2930 d.addCallback(_got_html_corrupt)
2931 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2932 def _got_json_corrupt(res):
2933 r = simplejson.loads(res)
2934 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2936 self.failIf(r["results"]["healthy"])
2937 self.failUnless(r["results"]["recoverable"])
2938 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2939 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2940 d.addCallback(_got_json_corrupt)
2942 d.addErrback(self.explain_web_error)
# Exercise t=check&repair=true (HTML output) against a healthy file (no
# repair needed), a sick file (repair succeeds), and a corrupt mutable file
# (verify finds the damage, repair succeeds). The "dead" case is set up but
# its check is commented out pending a decision on how failed repairs should
# be reported.
2945 def test_repair_html(self):
2946 self.basedir = "web/Grid/repair_html"
# NOTE(review): the listing elides set-up lines here (grid setup, DATA,
# self.uris/self.fileurls initialization) — confirm against the full file.
2948 c0 = self.g.clients[0]
2951 d = c0.upload(upload.Data(DATA, convergence=""))
2952 def _stash_uri(ur, which):
2953 self.uris[which] = ur.uri
2954 d.addCallback(_stash_uri, "good")
2955 d.addCallback(lambda ign:
2956 c0.upload(upload.Data(DATA+"1", convergence="")))
2957 d.addCallback(_stash_uri, "sick")
2958 d.addCallback(lambda ign:
2959 c0.upload(upload.Data(DATA+"2", convergence="")))
2960 d.addCallback(_stash_uri, "dead")
2961 def _stash_mutable_uri(n, which):
2962 self.uris[which] = n.get_uri()
2963 assert isinstance(self.uris[which], str)
2964 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2965 d.addCallback(_stash_mutable_uri, "corrupt")
2967 def _compute_fileurls(ignored):
2969 for which in self.uris:
2970 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2971 d.addCallback(_compute_fileurls)
# Same share-damage recipe as test_filecheck: delete one share of "sick",
# nine shares of "dead", and corrupt one share of "corrupt".
2973 def _clobber_shares(ignored):
2974 good_shares = self.find_shares(self.uris["good"])
2975 self.failUnlessEqual(len(good_shares), 10)
2976 sick_shares = self.find_shares(self.uris["sick"])
2977 os.unlink(sick_shares[0][2])
2978 dead_shares = self.find_shares(self.uris["dead"])
2979 for i in range(1, 10):
2980 os.unlink(dead_shares[i][2])
2981 c_shares = self.find_shares(self.uris["corrupt"])
2982 cso = CorruptShareOptions()
2983 cso.stdout = StringIO()
2984 cso.parseOptions([c_shares[0][2]])
# NOTE(review): the `corrupt_share(cso)` call appears to be on an elided line.
2986 d.addCallback(_clobber_shares)
2988 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2989 def _got_html_good(res):
2990 self.failUnless("Healthy" in res, res)
2991 self.failIf("Not Healthy" in res, res)
2992 self.failUnless("No repair necessary" in res, res)
2993 d.addCallback(_got_html_good)
2995 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2996 def _got_html_sick(res):
2997 self.failUnless("Healthy : healthy" in res, res)
2998 self.failIf("Not Healthy" in res, res)
2999 self.failUnless("Repair successful" in res, res)
3000 d.addCallback(_got_html_sick)
3002 # repair of a dead file will fail, of course, but it isn't yet
3003 # clear how this should be reported. Right now it shows up as
3006 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
3007 #def _got_html_dead(res):
3009 # self.failUnless("Healthy : healthy" in res, res)
3010 # self.failIf("Not Healthy" in res, res)
3011 # self.failUnless("No repair necessary" in res, res)
3012 #d.addCallback(_got_html_dead)
3014 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
3015 def _got_html_corrupt(res):
3016 self.failUnless("Healthy : Healthy" in res, res)
3017 self.failIf("Not Healthy" in res, res)
3018 self.failUnless("Repair successful" in res, res)
3019 d.addCallback(_got_html_corrupt)
3021 d.addErrback(self.explain_web_error)
# Exercise t=check&repair=true&output=json on a file missing one share: the
# JSON must report repair-attempted/repair-successful, an unhealthy
# pre-repair summary, and a healthy post-repair summary.
3024 def test_repair_json(self):
3025 self.basedir = "web/Grid/repair_json"
# NOTE(review): the listing elides set-up lines here (grid setup, DATA,
# self.uris/self.fileurls initialization) — confirm against the full file.
3027 c0 = self.g.clients[0]
3030 d = c0.upload(upload.Data(DATA+"1", convergence=""))
3031 def _stash_uri(ur, which):
3032 self.uris[which] = ur.uri
3033 d.addCallback(_stash_uri, "sick")
3035 def _compute_fileurls(ignored):
3037 for which in self.uris:
3038 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3039 d.addCallback(_compute_fileurls)
# Remove one of the 10 shares so the file is unhealthy but repairable.
3041 def _clobber_shares(ignored):
3042 sick_shares = self.find_shares(self.uris["sick"])
3043 os.unlink(sick_shares[0][2])
3044 d.addCallback(_clobber_shares)
3046 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
3047 def _got_json_sick(res):
3048 r = simplejson.loads(res)
3049 self.failUnlessEqual(r["repair-attempted"], True)
3050 self.failUnlessEqual(r["repair-successful"], True)
3051 self.failUnlessEqual(r["pre-repair-results"]["summary"],
3052 "Not Healthy: 9 shares (enc 3-of-10)")
3053 self.failIf(r["pre-repair-results"]["results"]["healthy"])
3054 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
3055 self.failUnless(r["post-repair-results"]["results"]["healthy"])
3056 d.addCallback(_got_json_sick)
3058 d.addErrback(self.explain_web_error)
# A directory containing an UnknownNode (a cap format "from the future")
# must still render: the HTML listing shows the child with a relative
# More Info link, the t=json listing reports it as "unknown" with the
# appropriate rw_uri/ro_uri, and the t=info page omits all the operation
# forms that only make sense for known node types. The read-only view of
# the directory must also render, without exposing the writecap.
3061 def test_unknown(self):
3062 self.basedir = "web/Grid/unknown"
# NOTE(review): the listing elides set-up lines here (grid setup,
# self.uris/self.fileurls initialization) — confirm against the full file.
3064 c0 = self.g.clients[0]
3068 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
3069 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
3070 # the future cap format may contain slashes, which must be tolerated
3071 expected_info_url = "uri/%s?t=info" % urllib.quote(future_writecap,
# NOTE(review): the `safe=""`-style second argument to urllib.quote is on an
# elided line — confirm against the full file.
3073 future_node = UnknownNode(future_writecap, future_readcap)
3075 d = c0.create_dirnode()
3076 def _stash_root_and_create_file(n):
3078 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
3079 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
3080 return self.rootnode.set_node(u"future", future_node)
3081 d.addCallback(_stash_root_and_create_file)
3082 # make sure directory listing tolerates unknown nodes
3083 d.addCallback(lambda ign: self.GET(self.rooturl))
3084 def _check_html(res):
3085 self.failUnlessIn("<td>future</td>", res)
3086 # find the More Info link for "future", should be relative
3087 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3088 info_url = mo.group(1)
3089 self.failUnlessEqual(info_url, "future?t=info")
3091 d.addCallback(_check_html)
3092 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3093 def _check_json(res, expect_writecap):
3094 data = simplejson.loads(res)
3095 self.failUnlessEqual(data[0], "dirnode")
3096 f = data[1]["children"]["future"]
3097 self.failUnlessEqual(f[0], "unknown")
# The two branches below test writecap presence depending on whether we
# fetched the writable or read-only view of the directory (the `if
# expect_writecap:` / `else:` lines are elided in this listing).
3099 self.failUnlessEqual(f[1]["rw_uri"], future_writecap)
3101 self.failIfIn("rw_uri", f[1])
3102 self.failUnlessEqual(f[1]["ro_uri"], future_readcap)
3103 self.failUnless("metadata" in f[1])
3104 d.addCallback(_check_json, expect_writecap=True)
3105 d.addCallback(lambda ign: self.GET(expected_info_url))
3106 def _check_info(res, expect_readcap):
3107 self.failUnlessIn("Object Type: <span>unknown</span>", res)
3108 self.failUnlessIn(future_writecap, res)
3110 self.failUnlessIn(future_readcap, res)
3111 self.failIfIn("Raw data as", res)
3112 self.failIfIn("Directory writecap", res)
3113 self.failIfIn("Checker Operations", res)
3114 self.failIfIn("Mutable File Operations", res)
3115 self.failIfIn("Directory Operations", res)
3116 d.addCallback(_check_info, expect_readcap=False)
3117 d.addCallback(lambda ign: self.GET(self.rooturl+"future?t=info"))
3118 d.addCallback(_check_info, expect_readcap=True)
3120 # and make sure that a read-only version of the directory can be
3121 # rendered too. This version will not have future_writecap
3122 d.addCallback(lambda ign: self.GET(self.rourl))
3123 d.addCallback(_check_html)
3124 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
3125 d.addCallback(_check_json, expect_writecap=False)
# Exercise t=stream-deep-check and t=stream-manifest over a small tree
# (root dir + good/small/sick files + an UnknownNode). Both streams emit one
# JSON unit per line, parent-first, with a final "stats" unit; UnknownNode
# children must be counted but not checked. Then make a subdirectory
# unrecoverable and confirm both streams report it via an "ERROR:" line
# followed by a traceback, after having emitted the units seen so far.
3128 def test_deep_check(self):
3129 self.basedir = "web/Grid/deep_check"
# NOTE(review): the listing elides set-up lines here (grid setup, DATA,
# self.uris/self.fileurls initialization) — confirm against the full file.
3131 c0 = self.g.clients[0]
3135 d = c0.create_dirnode()
3136 def _stash_root_and_create_file(n):
3138 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3139 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3140 d.addCallback(_stash_root_and_create_file)
3141 def _stash_uri(fn, which):
3142 self.uris[which] = fn.get_uri()
# NOTE(review): a `return fn` line (3143) appears to be elided; _stash_uri
# is chained, so it must pass the node through for later callbacks.
3144 d.addCallback(_stash_uri, "good")
3145 d.addCallback(lambda ign:
3146 self.rootnode.add_file(u"small",
3147 upload.Data("literal",
3149 d.addCallback(_stash_uri, "small")
3150 d.addCallback(lambda ign:
3151 self.rootnode.add_file(u"sick",
3152 upload.Data(DATA+"1",
3154 d.addCallback(_stash_uri, "sick")
3156 # this tests that deep-check and stream-manifest will ignore
3157 # UnknownNode instances. Hopefully this will also cover deep-stats.
3158 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
3159 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
3160 future_node = UnknownNode(future_writecap, future_readcap)
3161 d.addCallback(lambda ign: self.rootnode.set_node(u"future",future_node))
# Make "sick" unhealthy (but still recoverable) by deleting two shares.
3163 def _clobber_shares(ignored):
3164 self.delete_shares_numbered(self.uris["sick"], [0,1])
3165 d.addCallback(_clobber_shares)
3173 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# NOTE(review): the `def _done(res):` header and the try/except around the
# JSON parsing are elided in this listing.
3176 units = [simplejson.loads(line)
3177 for line in res.splitlines()
3180 print "response is:", res
3181 print "undecodeable line was '%s'" % line
# NOTE(review): a `raise` re-raising the decode error is likely elided here.
# 5 object units (root, good, small, sick, future) plus the stats unit.
3183 self.failUnlessEqual(len(units), 5+1)
3184 # should be parent-first
3186 self.failUnlessEqual(u0["path"], [])
3187 self.failUnlessEqual(u0["type"], "directory")
3188 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3189 u0cr = u0["check-results"]
3190 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
3192 ugood = [u for u in units
3193 if u["type"] == "file" and u["path"] == [u"good"]][0]
3194 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3195 ugoodcr = ugood["check-results"]
3196 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
# The final unit carries aggregate deep-stats.
3199 self.failUnlessEqual(stats["type"], "stats")
3201 self.failUnlessEqual(s["count-immutable-files"], 2)
3202 self.failUnlessEqual(s["count-literal-files"], 1)
3203 self.failUnlessEqual(s["count-directories"], 1)
3204 self.failUnlessEqual(s["count-unknown"], 1)
3205 d.addCallback(_done)
3207 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3208 def _check_manifest(res):
3209 self.failUnless(res.endswith("\n"))
3210 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3211 self.failUnlessEqual(len(units), 5+1)
3212 self.failUnlessEqual(units[-1]["type"], "stats")
3214 self.failUnlessEqual(first["path"], [])
3215 self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
3216 self.failUnlessEqual(first["type"], "directory")
3217 stats = units[-1]["stats"]
3218 self.failUnlessEqual(stats["count-immutable-files"], 2)
3219 self.failUnlessEqual(stats["count-literal-files"], 1)
3220 self.failUnlessEqual(stats["count-mutable-files"], 0)
# NOTE(review): duplicate assertion — count-immutable-files was already
# checked two lines above; possibly meant a different counter.
3221 self.failUnlessEqual(stats["count-immutable-files"], 2)
3222 self.failUnlessEqual(stats["count-unknown"], 1)
3223 d.addCallback(_check_manifest)
3225 # now add root/subdir and root/subdir/grandchild, then make subdir
3226 # unrecoverable, then see what happens
3228 d.addCallback(lambda ign:
3229 self.rootnode.create_subdirectory(u"subdir"))
3230 d.addCallback(_stash_uri, "subdir")
3231 d.addCallback(lambda subdir_node:
3232 subdir_node.add_file(u"grandchild",
3233 upload.Data(DATA+"2",
3235 d.addCallback(_stash_uri, "grandchild")
3237 d.addCallback(lambda ign:
3238 self.delete_shares_numbered(self.uris["subdir"],
# NOTE(review): the share-number list argument is on an elided line; enough
# shares are deleted to make "subdir" unrecoverable.
3246 # root/subdir [unrecoverable]
3247 # root/subdir/grandchild
3249 # how should a streaming-JSON API indicate fatal error?
3250 # answer: emit ERROR: instead of a JSON string
3252 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3253 def _check_broken_manifest(res):
3254 lines = res.splitlines()
# NOTE(review): the `error_lines = [i` list-comprehension opener is elided.
3256 for (i,line) in enumerate(lines)
3257 if line.startswith("ERROR:")]
3259 self.fail("no ERROR: in output: %s" % (res,))
3260 first_error = error_lines[0]
3261 error_line = lines[first_error]
3262 error_msg = lines[first_error+1:]
3263 error_msg_s = "\n".join(error_msg) + "\n"
3264 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3266 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3267 units = [simplejson.loads(line) for line in lines[:first_error]]
3268 self.failUnlessEqual(len(units), 6) # includes subdir
3269 last_unit = units[-1]
3270 self.failUnlessEqual(last_unit["path"], ["subdir"])
3271 d.addCallback(_check_broken_manifest)
3273 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3274 def _check_broken_deepcheck(res):
3275 lines = res.splitlines()
3277 for (i,line) in enumerate(lines)
3278 if line.startswith("ERROR:")]
3280 self.fail("no ERROR: in output: %s" % (res,))
3281 first_error = error_lines[0]
3282 error_line = lines[first_error]
3283 error_msg = lines[first_error+1:]
3284 error_msg_s = "\n".join(error_msg) + "\n"
3285 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3287 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3288 units = [simplejson.loads(line) for line in lines[:first_error]]
3289 self.failUnlessEqual(len(units), 6) # includes subdir
3290 last_unit = units[-1]
3291 self.failUnlessEqual(last_unit["path"], ["subdir"])
3292 r = last_unit["check-results"]["results"]
3293 self.failUnlessEqual(r["count-recoverable-versions"], 0)
3294 self.failUnlessEqual(r["count-shares-good"], 1)
3295 self.failUnlessEqual(r["recoverable"], False)
3296 d.addCallback(_check_broken_deepcheck)
3298 d.addErrback(self.explain_web_error)
# Exercise t=stream-deep-check&repair=true: the healthy root and "good" file
# must report repair-attempted=False, while the "sick" file (one deleted
# share) must be repaired back to 10 good shares. The "dead"/"corrupt"
# cases are scaffolded but commented out.
3301 def test_deep_check_and_repair(self):
3302 self.basedir = "web/Grid/deep_check_and_repair"
# NOTE(review): the listing elides set-up lines here (grid setup, DATA,
# self.uris/self.fileurls initialization) — confirm against the full file.
3304 c0 = self.g.clients[0]
3308 d = c0.create_dirnode()
3309 def _stash_root_and_create_file(n):
3311 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3312 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3313 d.addCallback(_stash_root_and_create_file)
3314 def _stash_uri(fn, which):
3315 self.uris[which] = fn.get_uri()
3316 d.addCallback(_stash_uri, "good")
3317 d.addCallback(lambda ign:
3318 self.rootnode.add_file(u"small",
3319 upload.Data("literal",
3321 d.addCallback(_stash_uri, "small")
3322 d.addCallback(lambda ign:
3323 self.rootnode.add_file(u"sick",
3324 upload.Data(DATA+"1",
3326 d.addCallback(_stash_uri, "sick")
3327 #d.addCallback(lambda ign:
3328 # self.rootnode.add_file(u"dead",
3329 # upload.Data(DATA+"2",
3331 #d.addCallback(_stash_uri, "dead")
3333 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3334 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3335 #d.addCallback(_stash_uri, "corrupt")
# Delete one share of "sick" so the deep-check pass has something to repair.
3337 def _clobber_shares(ignored):
3338 good_shares = self.find_shares(self.uris["good"])
3339 self.failUnlessEqual(len(good_shares), 10)
3340 sick_shares = self.find_shares(self.uris["sick"])
3341 os.unlink(sick_shares[0][2])
3342 #dead_shares = self.find_shares(self.uris["dead"])
3343 #for i in range(1, 10):
3344 # os.unlink(dead_shares[i][2])
3346 #c_shares = self.find_shares(self.uris["corrupt"])
3347 #cso = CorruptShareOptions()
3348 #cso.stdout = StringIO()
3349 #cso.parseOptions([c_shares[0][2]])
3351 d.addCallback(_clobber_shares)
3354 # root/good CHK, 10 shares
3356 # root/sick CHK, 9 shares
3358 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
# NOTE(review): the `def _done(res):` header is elided in this listing.
3360 units = [simplejson.loads(line)
3361 for line in res.splitlines()
# 4 object units (root, good, small, sick) plus the stats unit.
3363 self.failUnlessEqual(len(units), 4+1)
3364 # should be parent-first
3366 self.failUnlessEqual(u0["path"], [])
3367 self.failUnlessEqual(u0["type"], "directory")
3368 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3369 u0crr = u0["check-and-repair-results"]
3370 self.failUnlessEqual(u0crr["repair-attempted"], False)
3371 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3373 ugood = [u for u in units
3374 if u["type"] == "file" and u["path"] == [u"good"]][0]
3375 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3376 ugoodcrr = ugood["check-and-repair-results"]
# NOTE(review): BUG — the next two assertions re-check u0crr (the root's
# results, already asserted above) instead of ugoodcrr; this looks like a
# copy-paste slip, so the "good" file's repair results are never actually
# verified. They almost certainly should read ugoodcrr — confirm and fix.
3377 self.failUnlessEqual(u0crr["repair-attempted"], False)
3378 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3380 usick = [u for u in units
3381 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3382 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3383 usickcrr = usick["check-and-repair-results"]
3384 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3385 self.failUnlessEqual(usickcrr["repair-successful"], True)
3386 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3387 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3390 self.failUnlessEqual(stats["type"], "stats")
3392 self.failUnlessEqual(s["count-immutable-files"], 2)
3393 self.failUnlessEqual(s["count-literal-files"], 1)
3394 self.failUnlessEqual(s["count-directories"], 1)
3395 d.addCallback(_done)
3397 d.addErrback(self.explain_web_error)
# Helper: for the cap stashed under `which`, collect a (share-filename,
# lease-count) pair for every share found on disk. `ignored` absorbs the
# chained Deferred result so this can be used directly with addCallback.
3400 def _count_leases(self, ignored, which):
3401 u = self.uris[which]
3402 shares = self.find_shares(u)
# NOTE(review): the `lease_counts = []` initialization (line 3403) and the
# trailing `return lease_counts` are elided in this listing; the return
# value feeds _assert_leasecount via the Deferred chain.
3404 for shnum, serverid, fn in shares:
3405 sf = get_share_file(fn)
3406 num_leases = len(list(sf.get_leases()))
3407 lease_counts.append( (fn, num_leases) )
3410 def _assert_leasecount(self, lease_counts, expected):
3411 for (fn, num_leases) in lease_counts:
3412 if num_leases != expected:
3413 self.fail("expected %d leases, have %d, on %s" %
3414 (expected, num_leases, fn))
# Verify lease semantics of t=check&add-lease=true: a plain check adds no
# lease; add-lease from the same client merely renews the existing lease
# (count stays 1); add-lease from a second client (different lease secrets)
# adds a second lease — for both immutable and mutable files.
3416 def test_add_lease(self):
3417 self.basedir = "web/Grid/add_lease"
3418 self.set_up_grid(num_clients=2)
3419 c0 = self.g.clients[0]
# NOTE(review): the listing elides set-up lines here (DATA and the
# self.uris/self.fileurls initialization) — confirm against the full file.
3422 d = c0.upload(upload.Data(DATA, convergence=""))
3423 def _stash_uri(ur, which):
3424 self.uris[which] = ur.uri
3425 d.addCallback(_stash_uri, "one")
3426 d.addCallback(lambda ign:
3427 c0.upload(upload.Data(DATA+"1", convergence="")))
3428 d.addCallback(_stash_uri, "two")
3429 def _stash_mutable_uri(n, which):
3430 self.uris[which] = n.get_uri()
3431 assert isinstance(self.uris[which], str)
3432 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3433 d.addCallback(_stash_mutable_uri, "mutable")
3435 def _compute_fileurls(ignored):
3437 for which in self.uris:
3438 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3439 d.addCallback(_compute_fileurls)
# Baseline: each upload starts with exactly one lease per share.
3441 d.addCallback(self._count_leases, "one")
3442 d.addCallback(self._assert_leasecount, 1)
3443 d.addCallback(self._count_leases, "two")
3444 d.addCallback(self._assert_leasecount, 1)
3445 d.addCallback(self._count_leases, "mutable")
3446 d.addCallback(self._assert_leasecount, 1)
3448 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3449 def _got_html_good(res):
3450 self.failUnless("Healthy" in res, res)
3451 self.failIf("Not Healthy" in res, res)
3452 d.addCallback(_got_html_good)
# A plain check must not change any lease counts.
3454 d.addCallback(self._count_leases, "one")
3455 d.addCallback(self._assert_leasecount, 1)
3456 d.addCallback(self._count_leases, "two")
3457 d.addCallback(self._assert_leasecount, 1)
3458 d.addCallback(self._count_leases, "mutable")
3459 d.addCallback(self._assert_leasecount, 1)
3461 # this CHECK uses the original client, which uses the same
3462 # lease-secrets, so it will just renew the original lease
3463 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3464 d.addCallback(_got_html_good)
3466 d.addCallback(self._count_leases, "one")
3467 d.addCallback(self._assert_leasecount, 1)
3468 d.addCallback(self._count_leases, "two")
3469 d.addCallback(self._assert_leasecount, 1)
3470 d.addCallback(self._count_leases, "mutable")
3471 d.addCallback(self._assert_leasecount, 1)
3473 # this CHECK uses an alternate client, which adds a second lease
3474 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3475 d.addCallback(_got_html_good)
3477 d.addCallback(self._count_leases, "one")
3478 d.addCallback(self._assert_leasecount, 2)
3479 d.addCallback(self._count_leases, "two")
3480 d.addCallback(self._assert_leasecount, 1)
3481 d.addCallback(self._count_leases, "mutable")
3482 d.addCallback(self._assert_leasecount, 1)
# Same pattern for the mutable file: same-client add-lease renews...
3484 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3485 d.addCallback(_got_html_good)
3487 d.addCallback(self._count_leases, "one")
3488 d.addCallback(self._assert_leasecount, 2)
3489 d.addCallback(self._count_leases, "two")
3490 d.addCallback(self._assert_leasecount, 1)
3491 d.addCallback(self._count_leases, "mutable")
3492 d.addCallback(self._assert_leasecount, 1)
# ...and second-client add-lease adds a lease (clientnum=1 argument is on
# an elided continuation line 3495).
3494 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3496 d.addCallback(_got_html_good)
3498 d.addCallback(self._count_leases, "one")
3499 d.addCallback(self._assert_leasecount, 2)
3500 d.addCallback(self._count_leases, "two")
3501 d.addCallback(self._assert_leasecount, 1)
3502 d.addCallback(self._count_leases, "mutable")
3503 d.addCallback(self._assert_leasecount, 2)
3505 d.addErrback(self.explain_web_error)
# Verify t=stream-deep-check&add-lease=true over a tree (root dir, one CHK
# file, one literal file, one mutable file): without add-lease counts stay
# at 1; add-lease from the same client renews (still 1); add-lease from a
# second client raises each count to 2. Literal files hold no shares, so
# "small" is never lease-counted.
3508 def test_deep_add_lease(self):
3509 self.basedir = "web/Grid/deep_add_lease"
3510 self.set_up_grid(num_clients=2)
3511 c0 = self.g.clients[0]
# NOTE(review): the listing elides set-up lines here (DATA and the
# self.uris/self.fileurls initialization) — confirm against the full file.
3515 d = c0.create_dirnode()
3516 def _stash_root_and_create_file(n):
3518 self.uris["root"] = n.get_uri()
3519 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3520 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3521 d.addCallback(_stash_root_and_create_file)
3522 def _stash_uri(fn, which):
3523 self.uris[which] = fn.get_uri()
3524 d.addCallback(_stash_uri, "one")
3525 d.addCallback(lambda ign:
3526 self.rootnode.add_file(u"small",
3527 upload.Data("literal",
3529 d.addCallback(_stash_uri, "small")
3531 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3532 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3533 d.addCallback(_stash_uri, "mutable")
3535 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
# NOTE(review): the `def _done(res):` header is elided in this listing.
3537 units = [simplejson.loads(line)
3538 for line in res.splitlines()
3540 # root, one, small, mutable, stats
3541 self.failUnlessEqual(len(units), 4+1)
3542 d.addCallback(_done)
3544 d.addCallback(self._count_leases, "root")
3545 d.addCallback(self._assert_leasecount, 1)
3546 d.addCallback(self._count_leases, "one")
3547 d.addCallback(self._assert_leasecount, 1)
3548 d.addCallback(self._count_leases, "mutable")
3549 d.addCallback(self._assert_leasecount, 1)
# Same client: add-lease only renews the existing leases.
3551 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3552 d.addCallback(_done)
3554 d.addCallback(self._count_leases, "root")
3555 d.addCallback(self._assert_leasecount, 1)
3556 d.addCallback(self._count_leases, "one")
3557 d.addCallback(self._assert_leasecount, 1)
3558 d.addCallback(self._count_leases, "mutable")
3559 d.addCallback(self._assert_leasecount, 1)
# Second client (clientnum=1 on elided continuation line 3562) adds a
# second lease to every node in the tree.
3561 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3563 d.addCallback(_done)
3565 d.addCallback(self._count_leases, "root")
3566 d.addCallback(self._assert_leasecount, 2)
3567 d.addCallback(self._count_leases, "one")
3568 d.addCallback(self._assert_leasecount, 2)
3569 d.addCallback(self._count_leases, "mutable")
3570 d.addCallback(self._assert_leasecount, 2)
3572 d.addErrback(self.explain_web_error)
3576 def test_exceptions(self):
3577 self.basedir = "web/Grid/exceptions"
3578 self.set_up_grid(num_clients=1, num_servers=2)
3579 c0 = self.g.clients[0]
3582 d = c0.create_dirnode()
3584 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3585 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3587 d.addCallback(_stash_root)
3588 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
3590 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3591 self.delete_shares_numbered(ur.uri, range(1,10))
3593 u = uri.from_string(ur.uri)
3594 u.key = testutil.flip_bit(u.key, 0)
3595 baduri = u.to_string()
3596 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3597 d.addCallback(_stash_bad)
3598 d.addCallback(lambda ign: c0.create_dirnode())
3599 def _mangle_dirnode_1share(n):
3601 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3602 self.fileurls["dir-1share-json"] = url + "?t=json"
3603 self.delete_shares_numbered(u, range(1,10))
3604 d.addCallback(_mangle_dirnode_1share)
3605 d.addCallback(lambda ign: c0.create_dirnode())
3606 def _mangle_dirnode_0share(n):
3608 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3609 self.fileurls["dir-0share-json"] = url + "?t=json"
3610 self.delete_shares_numbered(u, range(0,10))
3611 d.addCallback(_mangle_dirnode_0share)
3613 # NotEnoughSharesError should be reported sensibly, with a
3614 # text/plain explanation of the problem, and perhaps some
3615 # information on which shares *could* be found.
3617 d.addCallback(lambda ignored:
3618 self.shouldHTTPError("GET unrecoverable",
3619 410, "Gone", "NoSharesError",
3620 self.GET, self.fileurls["0shares"]))
3621 def _check_zero_shares(body):
3622 self.failIf("<html>" in body, body)
3623 body = " ".join(body.strip().split())
3624 exp = ("NoSharesError: no shares could be found. "
3625 "Zero shares usually indicates a corrupt URI, or that "
3626 "no servers were connected, but it might also indicate "
3627 "severe corruption. You should perform a filecheck on "
3628 "this object to learn more. The full error message is: "
3629 "Failed to get enough shareholders: have 0, need 3")
3630 self.failUnlessEqual(exp, body)
3631 d.addCallback(_check_zero_shares)
3634 d.addCallback(lambda ignored:
3635 self.shouldHTTPError("GET 1share",
3636 410, "Gone", "NotEnoughSharesError",
3637 self.GET, self.fileurls["1share"]))
3638 def _check_one_share(body):
3639 self.failIf("<html>" in body, body)
3640 body = " ".join(body.strip().split())
3641 exp = ("NotEnoughSharesError: This indicates that some "
3642 "servers were unavailable, or that shares have been "
3643 "lost to server departure, hard drive failure, or disk "
3644 "corruption. You should perform a filecheck on "
3645 "this object to learn more. The full error message is:"
3646 " Failed to get enough shareholders: have 1, need 3")
3647 self.failUnlessEqual(exp, body)
3648 d.addCallback(_check_one_share)
3650 d.addCallback(lambda ignored:
3651 self.shouldHTTPError("GET imaginary",
3652 404, "Not Found", None,
3653 self.GET, self.fileurls["imaginary"]))
3654 def _missing_child(body):
3655 self.failUnless("No such child: imaginary" in body, body)
3656 d.addCallback(_missing_child)
3658 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3659 def _check_0shares_dir_html(body):
3660 self.failUnless("<html>" in body, body)
3661 # we should see the regular page, but without the child table or
3663 body = " ".join(body.strip().split())
3664 self.failUnlessIn('href="?t=info">More info on this directory',
3666 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3667 "could not be retrieved, because there were insufficient "
3668 "good shares. This might indicate that no servers were "
3669 "connected, insufficient servers were connected, the URI "
3670 "was corrupt, or that shares have been lost due to server "
3671 "departure, hard drive failure, or disk corruption. You "
3672 "should perform a filecheck on this object to learn more.")
3673 self.failUnlessIn(exp, body)
3674 self.failUnlessIn("No upload forms: directory is unreadable", body)
3675 d.addCallback(_check_0shares_dir_html)
3677 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3678 def _check_1shares_dir_html(body):
3679 # at some point, we'll split UnrecoverableFileError into 0-shares
3680 # and some-shares like we did for immutable files (since there
3681 # are different sorts of advice to offer in each case). For now,
3682 # they present the same way.
3683 self.failUnless("<html>" in body, body)
3684 body = " ".join(body.strip().split())
3685 self.failUnlessIn('href="?t=info">More info on this directory',
3687 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3688 "could not be retrieved, because there were insufficient "
3689 "good shares. This might indicate that no servers were "
3690 "connected, insufficient servers were connected, the URI "
3691 "was corrupt, or that shares have been lost due to server "
3692 "departure, hard drive failure, or disk corruption. You "
3693 "should perform a filecheck on this object to learn more.")
3694 self.failUnlessIn(exp, body)
3695 self.failUnlessIn("No upload forms: directory is unreadable", body)
3696 d.addCallback(_check_1shares_dir_html)
3698 d.addCallback(lambda ignored:
3699 self.shouldHTTPError("GET dir-0share-json",
3700 410, "Gone", "UnrecoverableFileError",
3702 self.fileurls["dir-0share-json"]))
3703 def _check_unrecoverable_file(body):
3704 self.failIf("<html>" in body, body)
3705 body = " ".join(body.strip().split())
3706 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3707 "could not be retrieved, because there were insufficient "
3708 "good shares. This might indicate that no servers were "
3709 "connected, insufficient servers were connected, the URI "
3710 "was corrupt, or that shares have been lost due to server "
3711 "departure, hard drive failure, or disk corruption. You "
3712 "should perform a filecheck on this object to learn more.")
3713 self.failUnlessEqual(exp, body)
3714 d.addCallback(_check_unrecoverable_file)
3716 d.addCallback(lambda ignored:
3717 self.shouldHTTPError("GET dir-1share-json",
3718 410, "Gone", "UnrecoverableFileError",
3720 self.fileurls["dir-1share-json"]))
3721 d.addCallback(_check_unrecoverable_file)
3723 d.addCallback(lambda ignored:
3724 self.shouldHTTPError("GET imaginary",
3725 404, "Not Found", None,
3726 self.GET, self.fileurls["imaginary"]))
3728 # attach a webapi child that throws a random error, to test how it
3730 w = c0.getServiceNamed("webish")
3731 w.root.putChild("ERRORBOOM", ErrorBoom())
3733 # "Accept: */*" : should get a text/html stack trace
3734 # "Accept: text/plain" : should get a text/plain stack trace
3735 # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
3736 # no Accept header: should get a text/html stack trace
3738 d.addCallback(lambda ignored:
3739 self.shouldHTTPError("GET errorboom_html",
3740 500, "Internal Server Error", None,
3741 self.GET, "ERRORBOOM",
3742 headers={"accept": ["*/*"]}))
3743 def _internal_error_html1(body):
3744 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3745 d.addCallback(_internal_error_html1)
3747 d.addCallback(lambda ignored:
3748 self.shouldHTTPError("GET errorboom_text",
3749 500, "Internal Server Error", None,
3750 self.GET, "ERRORBOOM",
3751 headers={"accept": ["text/plain"]}))
3752 def _internal_error_text2(body):
3753 self.failIf("<html>" in body, body)
3754 self.failUnless(body.startswith("Traceback "), body)
3755 d.addCallback(_internal_error_text2)
3757 CLI_accepts = "text/plain, application/octet-stream"
3758 d.addCallback(lambda ignored:
3759 self.shouldHTTPError("GET errorboom_text",
3760 500, "Internal Server Error", None,
3761 self.GET, "ERRORBOOM",
3762 headers={"accept": [CLI_accepts]}))
3763 def _internal_error_text3(body):
3764 self.failIf("<html>" in body, body)
3765 self.failUnless(body.startswith("Traceback "), body)
3766 d.addCallback(_internal_error_text3)
3768 d.addCallback(lambda ignored:
3769 self.shouldHTTPError("GET errorboom_text",
3770 500, "Internal Server Error", None,
3771 self.GET, "ERRORBOOM"))
3772 def _internal_error_html4(body):
3773 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3774 d.addCallback(_internal_error_html4)
3776 def _flush_errors(res):
3777 # Trial: please ignore the CompletelyUnhandledError in the logs
3778 self.flushLoggedErrors(CompletelyUnhandledError)
3780 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Deliberately unhandled exception, raised to exercise the webapi's
    internal-server-error (500) reporting. The class statement needs a body
    to be valid Python; this docstring provides it."""
class ErrorBoom(rend.Page):
    """A web resource that always fails: it explodes before any rendering
    happens, so tests can check how the webapi reports unexpected
    internal errors."""
    def beforeRender(self, ctx):
        # Raise before rendering starts, guaranteeing a 500 response.
        raise CompletelyUnhandledError("whoops")