import os.path, re, urllib
import simplejson
from StringIO import StringIO
from twisted.application import service
from twisted.trial import unittest
from twisted.internet import defer, reactor
from twisted.web import client, error, http
from twisted.python import failure, log
from allmydata import interfaces, uri, webish
from allmydata.storage.shares import get_share_file
from allmydata.storage_client import StorageFarmBroker
from allmydata.immutable import upload, download
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.unknown import UnknownNode
from allmydata.web import status, common
from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
from allmydata.util import fileutil, base32
from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
     create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
from allmydata.interfaces import IMutableFileNode
from allmydata.mutable import servermap, publish, retrieve
import common_util as testutil
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_web import HTTPClientGETFactory, \
     HTTPClientHEADFactory
from allmydata.client import Client
30 # create a fake uploader/downloader, and a couple of fake dirnodes, then
31 # create a webserver that works against them
# trial per-test timeout, in seconds, read by the test runner
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
class FakeStatsProvider:
    # Minimal stand-in for the node's stats provider, used by FakeClient.
    # NOTE(review): the accessor method wrapping this dict (presumably
    # 'def get_stats(self):' returning it) appears to be missing from this
    # chunk -- confirm against the full file.
        stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker double that hands out fake, in-memory filenodes."""
    def _create_lit(self, cap):
        # literal caps are served by the same fake immutable node type
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for interface compatibility and ignored
        node = FakeMutableFileNode(None, None, None, None)
        return node.create(contents)
class FakeUploader(service.Service):
    # Uploader double: "uploads" by stashing the data in a fake in-memory
    # CHK filenode instead of pushing shares to a real grid.
    def upload(self, uploadable, history=None):
        # read the whole uploadable, then wrap the data in a fake node and
        # report its URI via an UploadResults object
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # NOTE(review): the 'def _got_data(datav):' callback that joins the
        # read data into 'data' (and the trailing 'return results'/'return d')
        # is missing from this chunk -- confirm against the full file.
        n = create_chk_filenode(data)
        results = upload.UploadResults()
        results.uri = n.get_uri()
        d.addCallback(_got_data)
    def get_helper_info(self):
        # NOTE(review): method body missing from this chunk; presumably
        # returns a (furl, connected) tuple -- confirm against the full file.
    # NOTE(review): the enclosing 'class FakeHistory:' header is not visible
    # in this chunk.
    # Class-level singletons: every instance reports the same single status
    # object of each kind, which the /status tests look up by counter.
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        # NOTE(review): return statement missing from this chunk; an empty
        # list would satisfy callers -- confirm against the full file.
class FakeClient(Client):
    # Client double wired up with the Fake* services defined above.
    # NOTE(review): the 'def __init__(self):' line appears to be missing
    # from this chunk; the assignments below read like its body.
    # don't upcall to Client.__init__, since we only want to initialize a
        service.MultiService.__init__(self)
        self.nodeid = "fake_nodeid"
        self.nickname = "fake_nickname"
        self.introducer_furl = "None"
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = None
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(None, permute_peers=True)
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.setServiceParent(self)
        self.nodemaker = FakeNodeMaker(None, None, None,
                                       self.uploader, None, None,
        # NOTE(review): remaining FakeNodeMaker constructor arguments are
        # missing from this chunk.
    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
# convenient module-level alias for the SDMF size limit enforced by the fakes
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
class WebMixin(object):
    # Shared fixture: starts a FakeClient + webish server and populates a
    # small directory tree (public/foo/..., public/reedownlee/...) for the
    # HTTP tests below.
    # NOTE(review): the 'def setUp(self):' line appears to be missing from
    # this chunk; the following statements read like its body.
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        self.ws.setServiceParent(self.s)
        self.webish_port = port = self.ws.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port
        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        # NOTE(review): the 'def _then(res):' callback line is missing here.
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            # NOTE(review): the assignment binding 'foo' (and presumably
            # self._foo_node) from res[2][1] is missing from this chunk.
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri(),
                                     foo.get_readonly_uri())
            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()
            foo.set_uri(u"empty", res[3][1].get_uri(),
                        res[3][1].get_readonly_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri, sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file, baz_file)
            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]
            # NOTE(review): the assignment binding 'rodir' (presumably
            # rodir = res[5][1]) is missing from this chunk.
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                                     rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file, baz_file)
            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"
            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
        # NOTE(review): 'd.addCallback(_then)' wiring and the trailing
        # 'return d' are missing from this chunk.
193 def makefile(self, number):
194 contents = "contents of file %s\n" % number
195 n = create_chk_filenode(contents)
196 return contents, n, n.get_uri()
    # NOTE(review): the 'def tearDown(self):' line appears to be missing
    # from this chunk; this statement reads like its body.
        return self.s.stopService()
201 def failUnlessIsBarDotTxt(self, res):
202 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
204 def failUnlessIsBarJSON(self, res):
205 data = simplejson.loads(res)
206 self.failUnless(isinstance(data, list))
207 self.failUnlessEqual(data[0], u"filenode")
208 self.failUnless(isinstance(data[1], dict))
209 self.failIf(data[1]["mutable"])
210 self.failIf("rw_uri" in data[1]) # immutable
211 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
212 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
213 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # Assert *res* is the t=json rendering of the mutable 'foo' directory.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        # NOTE(review): the 'for (name, value)' line of this dict
        # comprehension is missing from this chunk.
        kids = dict( [(unicode(name),value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
        # NOTE(review): continuation (presumably self._bar_txt_uri) missing
        # from this chunk.
    def GET(self, urlpath, followRedirect=False, return_response=False,
        # NOTE(review): the '**kwargs):' continuation of this signature and
        # the 'if return_response: / def _got_data(data):' scaffolding
        # around the lines below are missing from this chunk.
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        # NOTE(review): unlike GET above, this passes the bare urlpath (not
        # self.webish_url + urlpath) to the factory -- confirm intentional.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        # NOTE(review): the 'if return_response: / def _got_data(data):'
        # scaffolding around the lines below is missing from this chunk.
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
275 def PUT(self, urlpath, data, **kwargs):
276 url = self.webish_url + urlpath
277 return client.getPage(url, method="PUT", postdata=data, **kwargs)
279 def DELETE(self, urlpath):
280 url = self.webish_url + urlpath
281 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Build and submit a multipart/form-data POST assembled from
        # **fields; tuple values are treated as (filename, contents).
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        # NOTE(review): the 'sep'/'form' initialization lines and the
        # per-field separator lines are missing from this chunk.
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
            # NOTE(review): an 'else:' line is missing here.
                form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            # NOTE(review): an 'else:' line is missing here.
                assert isinstance(value, str)
        # NOTE(review): the closing-separator append lines are missing here.
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
        # NOTE(review): dict continuation/closing line missing here.
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # errback helper: assert *res* is a Failure wrapping
        # *expected_failure*, optionally containing the given substrings.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            # NOTE(review): an 'if substring:' guard line is missing here.
                self.failUnless(substring in str(res),
                                "substring '%s' not in '%s'"
                                % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
        # NOTE(review): an 'else:' line is missing here.
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def shouldFail2(self, expected_failure, which, substring,
                    # NOTE(review): a 'response_substring,' parameter line
                    # appears to be missing here.
                    callable, *args, **kwargs):
        # like shouldFail, but invokes *callable* itself and inspects the
        # resulting Deferred's outcome
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        # NOTE(review): a 'def _done(res):' line is missing here.
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                self.failUnless(substring in str(res),
                                "%s: substring '%s' not in '%s'"
                                % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                    % (which,
                                       response_substring, res.value.response))
            # NOTE(review): an 'else:' line is missing here.
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
        # NOTE(review): 'd.addBoth(_done)' and 'return d' are missing here.

    def should404(self, res, which):
        # errback helper: assert *res* is a Failure wrapping an HTTP 404
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
        # NOTE(review): an 'else:' line is missing here.
            self.fail("%s was supposed to Error(404), not get '%s'" %
            # NOTE(review): continuation '(which, res))' missing here.
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    def test_create(self):
        # NOTE(review): method body missing from this chunk.

    def test_welcome(self):
        # fetch "/" and check the welcome banner, then re-fetch with a
        # basedir in place
        # NOTE(review): the 'd = self.GET("/")' and 'def _check(res):'
        # scaffolding around the lines below is missing from this chunk.
            self.failUnless('Welcome To Tahoe-LAFS' in res, res)
            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
        # NOTE(review): trailing 'return d' missing from this chunk.
    def test_provisioning(self):
        # drive the /provisioning form through several parameter sets and
        # check the rendered space-consumption figures
        d = self.GET("/provisioning/")
        # NOTE(review): a 'def _check(res):' line is missing here.
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
                      # NOTE(review): further form-field lines missing here.
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
        # NOTE(review): a 'def _check2(res):' line is missing here.
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)
            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
                      # NOTE(review): further form-field lines missing here.
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
        # NOTE(review): a 'def _check3(res):' line is missing here.
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
        # NOTE(review): a 'def _check4(res):' line is missing here.
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_reliability_tool(self):
        # the /reliability page needs NumPy; skip if unavailable
        # NOTE(review): the 'try:' / 'except ImportError:' scaffolding
        # around the import below is missing from this chunk.
        from allmydata import reliability
        _hush_pyflakes = reliability
            raise unittest.SkipTest("reliability tool requires NumPy")
        d = self.GET("/reliability/")
        # NOTE(review): a 'def _check(res):' line is missing here.
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      # NOTE(review): several form-field lines missing here.
                      "check_period": "1M",
                      "report_period": "3M",
                      # NOTE(review): further form-field lines missing here.
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
        # NOTE(review): a 'def _check2(res):' line is missing here.
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
        # NOTE(review): trailing 'return d' missing from this chunk.
    def test_status(self):
        # exercise the /status pages, locating each singleton fake status
        # object by its counter number
        h = self.s.get_history()
        dl_num = h.list_all_download_statuses()[0].get_counter()
        ul_num = h.list_all_upload_statuses()[0].get_counter()
        mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = h.list_all_publish_statuses()[0].get_counter()
        ret_num = h.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
        # NOTE(review): a 'def _check(res):' line is missing here.
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)
        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
        # NOTE(review): a 'def _check_dl(res):' line is missing here.
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
        # NOTE(review): a 'def _check_ul(res):' line is missing here.
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
        # NOTE(review): trailing 'return d' missing from this chunk.
508 def test_status_numbers(self):
509 drrm = status.DownloadResultsRendererMixin()
510 self.failUnlessEqual(drrm.render_time(None, None), "")
511 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
512 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
513 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
514 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
515 self.failUnlessEqual(drrm.render_rate(None, None), "")
516 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
517 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
518 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
520 urrm = status.UploadResultsRendererMixin()
521 self.failUnlessEqual(urrm.render_time(None, None), "")
522 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
523 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
524 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
525 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
526 self.failUnlessEqual(urrm.render_rate(None, None), "")
527 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
528 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
529 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        # plain GET of a file returns its contents
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_range(self):
        # a bounded Range request must yield 206 plus a content-range header
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
        # NOTE(review): 'd.addCallback(_got)' / 'return d' missing here.

    def test_GET_FILEURL_partial_range(self):
        # an open-ended range ("bytes=5-") returns the tail of the file
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
        # NOTE(review): 'd.addCallback(_got)' / 'return d' missing here.

    def test_HEAD_FILEURL_range(self):
        # HEAD with a range returns the range headers but an empty body
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
        # NOTE(review): 'd.addCallback(_got)' / 'return d' missing here.

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
        # NOTE(review): 'd.addCallback(_got)' / 'return d' missing here.

    def test_GET_FILEURL_range_bad(self):
        # a syntactically invalid Range header must be rejected
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             # NOTE(review): expected-status argument line
                             # missing here.
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",
                             # NOTE(review): 'headers=headers)' and
                             # 'return d' missing here.

    def test_HEAD_FILEURL(self):
        # a plain HEAD returns length/type headers and no body
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        # NOTE(review): 'd.addCallback(_got)' / 'return d' missing here.
    def test_GET_FILEURL_named(self):
        # /file/$CAP/@@name=/filename (and /named/) let the browser pick a
        # filename without affecting which file is fetched
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_FILEURL_named_bad(self):
        # PUT is not allowed on /file URLs
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             # NOTE(review): expected-status argument line
                             # missing here.
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_DIRURL_named_bad(self):
        # /file with a directory cap is an error
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             # NOTE(review): status and substring argument
                             # lines missing here.
                             self.GET, base + "/@@name=/blah.txt")
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             # NOTE(review): expected-status argument line
                             # missing here.
                             "/file must be followed by a file-cap and a name",
                             # NOTE(review): callable arguments and
                             # 'return d' missing here.

    def test_GET_unhandled_URI_named(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "400 Bad Request", "is not a file-cap",
                             # NOTE(review): 'self.GET, base' and 'return d'
                             # missing here.

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             # NOTE(review): expected-status argument line
                             # missing here.
                             "GET unknown URI type: can only do t=info",
                             # NOTE(review): callable arguments and
                             # 'return d' missing here.

    def test_GET_FILE_URI(self):
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        # NOTE(review): 'd = self.GET(base)' line missing here.
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILE_URI_badchild(self):
        # asking for a child of a file is an error
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             # NOTE(review): 'self.GET, base' and 'return d'
                             # missing here.

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             # NOTE(review): 'self.PUT, base, ...' and
                             # 'return d' missing here.

    def test_GET_FILEURL_save(self):
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        # header.
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
        # NOTE(review): trailing 'return d' missing from this chunk.
    def test_PUT_overwrite_only_files(self):
        # create a directory, put a file in that directory.
        contents, n, filecap = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
        d.addCallback(lambda res:
            self.PUT(self.public_url + "/foo/dir/file1.txt",
                     self.NEWFILE_CONTENTS))
        # try to overwrite the file with replace=only-files
        d.addCallback(lambda res:
            self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
            # NOTE(review): 'filecap))' continuation missing here.
        d.addCallback(lambda res:
            self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                "There was already a child by that name, and you asked me "
                # NOTE(review): remainder of the message-substring line
                # missing here.
            self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
            # NOTE(review): 'filecap))' continuation and 'return d'
            # missing here.

    def test_PUT_NEWFILEURL(self):
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_not_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_range_bad(self):
        # Content-Range on PUT is not supported and must be rejected
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
                             # NOTE(review): 'headers=headers)' missing here.
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        # NOTE(review): a 'def _check_uri(res):' line is missing here.
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
            # NOTE(review): a 'return res' line is missing here.
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             # NOTE(review): 'u"new.txt",' argument line missing here.
                                                             self.NEWFILE_CONTENTS))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        # SDMF files are limited to one segment
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             # NOTE(review): a 'self.PUT,' argument line is
                             # missing here.
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
                             # NOTE(review): final argument and 'return d'
                             # missing here.

    def test_PUT_NEWFILEURL_no_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  # NOTE(review): expected-status argument line missing here.
                  "There was already a child by that name, and you asked me "
                  # NOTE(review): remainder of the message-substring line and
                  # 'return d' missing here.

    def test_PUT_NEWFILEURL_mkdirs(self):
        # PUT to a path with missing intermediate dirs creates them
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        # NOTE(review): the line binding 'fn' (presumably self._foo_node) is
        # missing here.
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_PUT_NEWFILEURL_blocked(self):
        # a file in the path blocks intermediate-directory creation
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  # NOTE(review): expected-status argument line missing here.
                  "Unable to create directory 'blockingfile': a file was in the way")
        # NOTE(review): trailing 'return d' missing from this chunk.
    def test_DELETE_FILEURL(self):
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        # completely.
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_uri(self):
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
        # NOTE(review): a 'def _check(res):' line is missing here.
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
        # NOTE(review): a 'def _check2(res):' line is missing here.
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 # NOTE(review): substring and callable
                                 # argument lines missing here.
                                 self.public_url + "/foo/bar.txt?t=bogus")
        # NOTE(review): trailing 'return d' missing from this chunk.

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
        # NOTE(review): trailing 'return d' missing from this chunk.
888 def test_GET_DIRURL(self):
889 # the addSlash means we get a redirect here
890 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
892 d = self.GET(self.public_url + "/foo", followRedirect=True)
894 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
896 # the FILE reference points to a URI, but it should end in bar.txt
897 bar_url = ("%s/file/%s/@@named=/bar.txt" %
898 (ROOT, urllib.quote(self._bar_txt_uri)))
899 get_bar = "".join([r'<td>FILE</td>',
901 r'<a href="%s">bar.txt</a>' % bar_url,
903 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
905 self.failUnless(re.search(get_bar, res), res)
906 for line in res.split("\n"):
907 # find the line that contains the delete button for bar.txt
908 if ("form action" in line and
909 'value="delete"' in line and
910 'value="bar.txt"' in line):
911 # the form target should use a relative URL
912 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
913 self.failUnless(('action="%s"' % foo_url) in line, line)
914 # and the when_done= should too
915 #done_url = urllib.quote(???)
916 #self.failUnless(('name="when_done" value="%s"' % done_url)
920 self.fail("unable to find delete-bar.txt line", res)
922 # the DIR reference just points to a URI
923 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
924 get_sub = ((r'<td>DIR</td>')
925 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
926 self.failUnless(re.search(get_sub, res), res)
927 d.addCallback(_check)
929 # look at a directory which is readonly
930 d.addCallback(lambda res:
931 self.GET(self.public_url + "/reedownlee", followRedirect=True))
933 self.failUnless("(read-only)" in res, res)
934 self.failIf("Upload a file" in res, res)
935 d.addCallback(_check2)
937 # and at a directory that contains a readonly directory
938 d.addCallback(lambda res:
939 self.GET(self.public_url, followRedirect=True))
941 self.failUnless(re.search('<td>DIR-RO</td>'
942 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
943 d.addCallback(_check3)
945 # and an empty directory
946 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
948 self.failUnless("directory is empty" in res, res)
949 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
950 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
951 d.addCallback(_check4)
    def test_GET_DIRURL_badtype(self):
        """A GET on a directory with an unrecognized t= value must yield an
        HTTP error (the expected-status arguments are not visible here)."""
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 self.public_url + "/foo?t=bogus")
    def test_GET_DIRURL_json(self):
        """t=json on a directory should render the directory as JSON."""
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
    def test_POST_DIRURL_manifest_no_ophandle(self):
        """start-manifest is a slow operation, so it must demand ophandle=."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_manifest_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-manifest")
    def test_POST_DIRURL_manifest(self):
        """Run t=start-manifest under ophandle 125 and examine the results in
        html, text, and JSON forms."""
        d = defer.succeed(None)
        def getman(ignored, output):
            # start (or re-poll) the manifest operation under ophandle=125
            d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
            d.addCallback(self.wait_for_operation, "125")
            d.addCallback(self.get_operation_results, "125", output)
        d.addCallback(getman, None)
        def _got_html(manifest):
            self.failUnless("Manifest of SI=" in manifest)
            self.failUnless("<td>sub</td>" in manifest)
            self.failUnless(self._sub_uri in manifest)
            self.failUnless("<td>sub/baz.txt</td>" in manifest)
        d.addCallback(_got_html)
        # both t=status and unadorned GET should be identical
        d.addCallback(lambda res: self.GET("/operations/125"))
        d.addCallback(_got_html)
        d.addCallback(getman, "html")
        d.addCallback(_got_html)
        d.addCallback(getman, "text")
        def _got_text(manifest):
            self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
            self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
        d.addCallback(_got_text)
        d.addCallback(getman, "JSON")
            # body of _got_json: verify the (path, cap) pairs in the manifest
            data = res["manifest"]
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
            self.failUnless((u"sub",u"baz.txt") in got)
            self.failUnless("finished" in res)
            self.failUnless("origin" in res)
            self.failUnless("storage-index" in res)
            self.failUnless("verifycaps" in res)
            self.failUnless("stats" in res)
        d.addCallback(_got_json)
    def test_POST_DIRURL_deepsize_no_ophandle(self):
        """start-deep-size is a slow operation, so it must demand ophandle=."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepsize_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-size")
    def test_POST_DIRURL_deepsize(self):
        """t=start-deep-size reports a total size in both json and text forms."""
        d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "126")
        d.addCallback(self.get_operation_results, "126", "json")
        def _got_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnless(size > 1000)
        d.addCallback(_got_json)
        d.addCallback(self.get_operation_results, "126", "text")
            # body of _got_text: parse the "size:" line out of the text report
            mo = re.search(r'^size: (\d+)$', res, re.M)
            self.failUnless(mo, res)
            size = int(mo.group(1))
            # with directories, the size varies.
            self.failUnless(size > 1000)
        d.addCallback(_got_text)
    def test_POST_DIRURL_deepstats_no_ophandle(self):
        """start-deep-stats is a slow operation, so it must demand ophandle=."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepstats_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-stats")
    def test_POST_DIRURL_deepstats(self):
        """t=start-deep-stats returns the expected per-type counts and sizes
        for the fixed fixture tree under /foo."""
        d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "127")
        d.addCallback(self.get_operation_results, "127", "json")
        def _got_json(stats):
            expected = {"count-immutable-files": 3,
                        "count-mutable-files": 0,
                        "count-literal-files": 0,
                        "count-directories": 3,
                        "size-immutable-files": 57,
                        "size-literal-files": 0,
                        #"size-directories": 1912, # varies
                        #"largest-directory": 1590,
                        "largest-directory-children": 5,
                        "largest-immutable-file": 19,
            for k,v in expected.iteritems():
                self.failUnlessEqual(stats[k], v,
                                     "stats[%s] was %s, not %s" %
            self.failUnlessEqual(stats["size-files-histogram"],
        d.addCallback(_got_json)
    def test_POST_DIRURL_stream_manifest(self):
        """t=stream-manifest emits newline-terminated JSON units, with a
        final "stats" unit last."""
        d = self.POST(self.public_url + "/foo/?t=stream-manifest")
            # body of _check: parse one JSON unit per line
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 7)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self._foo_uri)
            self.failUnlessEqual(first["type"], "directory")
            baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
            self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
            self.failIfEqual(baz["storage-index"], None)
            self.failIfEqual(baz["verifycap"], None)
            self.failIfEqual(baz["repaircap"], None)
        d.addCallback(_check)
    def test_GET_DIRURL_uri(self):
        """t=uri returns the directory's (read-write) URI."""
        d = self.GET(self.public_url + "/foo?t=uri")
            self.failUnlessEqual(res, self._foo_uri)
        d.addCallback(_check)
    def test_GET_DIRURL_readonly_uri(self):
        """t=readonly-uri returns the directory's read-only cap."""
        d = self.GET(self.public_url + "/foo?t=readonly-uri")
            self.failUnlessEqual(res, self._foo_readonly_uri)
        d.addCallback(_check)
    def test_PUT_NEWDIRURL(self):
        """PUT ?t=mkdir creates an empty child directory."""
        d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_NEWDIRURL_initial_children(self):
        """mkdir-with-children populates the new directory from JSON kids."""
        (newkids, filecap1, filecap2, filecap3,
         dircap) = self._create_initial_children()
        d = self.POST(self.public_url + "/foo/newdir?t=mkdir-with-children",
                      children=simplejson.dumps(newkids))
            # body of _check: verify every initial child landed with its cap
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-imm", filecap1))
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-mutable",
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-mutable-ro",
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"dirchild", dircap))
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessChildURIIs, u"child-imm", filecap1)
    def test_PUT_NEWDIRURL_exists(self):
        """mkdir on an already-existing directory leaves its children intact."""
        d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_blocked(self):
        """mkdir through a path component that is a file must 409 Conflict
        and leave the existing tree unchanged."""
        d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                             "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                             self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_mkdir_p(self):
        """t=mkdir-p creates all intermediate directories along path=."""
        d = defer.succeed(None)
        d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
        d.addCallback(lambda res: self._foo_node.get(u"mkp"))
        def mkdir_p(mkpnode):
            url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
            def made_subsub(ssuri):
                # the returned URI must match the node actually reachable at
                # mkp/sub1/sub2
                d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
                d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
                d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            d.addCallback(made_subsub)
        d.addCallback(mkdir_p)
    def test_PUT_NEWDIRURL_mkdirs(self):
        """PUT mkdir creates missing intermediate directories along the path."""
        d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
        d.addCallback(lambda res:
                      self._foo_node.get_child_at_path(u"subdir/newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_DELETE_DIRURL(self):
        """DELETE on a directory unlinks it from its parent."""
        d = self.DELETE(self.public_url + "/foo")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self.public_root, u"foo"))
    def test_DELETE_DIRURL_missing(self):
        """DELETE of a nonexistent child 404s and leaves the parent alone."""
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self.public_root, u"foo"))
    def test_DELETE_DIRURL_missing2(self):
        """DELETE of a nonexistent top-level name 404s."""
        d = self.DELETE(self.public_url + "/missing")
        d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    def dump_root(self):
        """Debug helper: walk every node under self.public_root with a
        visitor (visitor body not shown here)."""
        w = webish.DirnodeWalkerMixin()
        def visitor(childpath, childnode, metadata):
        d = w.walk(self.public_root, visitor)
    def failUnlessNodeKeysAre(self, node, expected_keys):
        """Assert that node's children are exactly expected_keys (unicode)."""
        for k in expected_keys:
            assert isinstance(k, unicode)
        def _check(children):
            self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
        d.addCallback(_check)
    def failUnlessNodeHasChild(self, node, name):
        """Assert that node has a child named 'name' (must be unicode)."""
        assert isinstance(name, unicode)
        def _check(children):
            self.failUnless(name in children)
        d.addCallback(_check)
    def failIfNodeHasChild(self, node, name):
        """Assert that node has no child named 'name' (must be unicode)."""
        assert isinstance(name, unicode)
        def _check(children):
            self.failIf(name in children)
        d.addCallback(_check)
    def failUnlessChildContentsAre(self, node, name, expected_contents):
        """Download immutable child 'name' and assert its contents match."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        d.addCallback(lambda node: node.download_to_data())
        def _check(contents):
            self.failUnlessEqual(contents, expected_contents)
        d.addCallback(_check)
    def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
        """Download the best version of mutable child 'name' and compare."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        d.addCallback(lambda node: node.download_best_version())
        def _check(contents):
            self.failUnlessEqual(contents, expected_contents)
        d.addCallback(_check)
    def failUnlessChildURIIs(self, node, name, expected_uri):
        """Assert that child 'name' has URI expected_uri (whitespace-stripped)."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        d.addCallback(_check)
    def failUnlessURIMatchesChild(self, got_uri, node, name):
        """Assert that got_uri (stripped) equals the URI of child 'name'."""
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            self.failUnlessEqual(got_uri.strip(), child.get_uri())
        d.addCallback(_check)
1285 def failUnlessCHKURIHasContents(self, got_uri, contents):
1286 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
    def test_POST_upload(self):
        """POST t=upload adds the file as a child and stores its contents."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_unicode(self):
        """Upload a file whose name contains non-ASCII characters and fetch
        it back via the UTF-8-encoded URL."""
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=(filename, self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_unicode_named(self):
        """A name= field overrides the filename supplied in the upload form."""
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("overridden", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_no_link(self):
        """POST /uri t=upload stores the file without linking it anywhere,
        and the results page contains the new URI."""
        d = self.POST("/uri", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check_upload_results(page):
            # this should be a page which describes the results of the upload
            # that just finished.
            self.failUnless("Upload Results:" in page)
            self.failUnless("URI:" in page)
            uri_re = re.compile("URI: <tt><span>(.*)</span>")
            mo = uri_re.search(page)
            self.failUnless(mo, page)
            new_uri = mo.group(1)
        d.addCallback(_check_upload_results)
        d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
    def test_POST_upload_no_link_whendone(self):
        """when_done= makes an unlinked upload redirect to the given URL."""
        d = self.POST("/uri", t="upload", when_done="/",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/")
    def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
        """Invoke callable and require it to raise PageRedirect; pass the
        status code and target URL to checker. Fails if no redirect occurs."""
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(error.PageRedirect)
                statuscode = res.value.status
                target = res.value.location
                return checker(statuscode, target)
            self.fail("%s: callable was supposed to redirect, not return '%s'"
    def test_POST_upload_no_link_whendone_results(self):
        """when_done=/uri/%(uri)s interpolates the new file's URI, and the
        redirect target serves the uploaded contents."""
        def check(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.startswith(self.webish_url), target)
            return client.getPage(target, method="GET")
        d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                                 self.POST, "/uri", t="upload",
                                 when_done="/uri/%(uri)s",
                                 file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    def test_POST_upload_no_link_mutable(self):
        """mutable=true upload yields an SSK writecap whose contents are
        retrievable via /uri/CAP and /file/CAP."""
        d = self.POST("/uri", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check(filecap):
            filecap = filecap.strip()
            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
            self.filecap = filecap
            u = uri.WriteableSSKFileURI.init_from_string(filecap)
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(filecap)
            return n.download_best_version()
        d.addCallback(_check)
            # _check2: contents match, then fetch via /uri/CAP
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/uri/%s" % urllib.quote(self.filecap))
        d.addCallback(_check2)
            # _check3: contents match, then fetch via /file/CAP
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/file/%s" % urllib.quote(self.filecap))
        d.addCallback(_check3)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        d.addCallback(_check4)
    def test_POST_upload_no_link_mutable_toobig(self):
        """Oversized SDMF upload must be rejected with 413."""
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_no_link_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             "/uri", t="upload", mutable="true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def test_POST_upload_mutable(self):
        """Create a mutable file via POST, overwrite it via POST and PUT
        (URI must stay stable), then inspect it through the directory
        listing, JSON forms, t=uri/t=readonly-uri, /uri/CAP, HEAD, and an
        oversized-overwrite error."""
        # this creates a mutable file
        d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            # remember the node and its URI for later comparisons
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self._mutable_node = newnode
            self._mutable_uri = newnode.get_uri()
        # now upload it again and make sure that the URI doesn't change
        NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
        d.addCallback(lambda res:
                      self.POST(self.public_url + "/foo", t="upload",
                                file=("new.txt", NEWER_CONTENTS)))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got2)
        # upload a second time, using PUT instead of POST
        NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        # finally list the directory, since mutable files are displayed
        # slightly differently
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/",
                               followRedirect=True))
        def _check_page(res):
            # TODO: assert more about the contents
            self.failUnless("SSK" in res)
        d.addCallback(_check_page)
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got3)
        # look at the JSON form of the enclosing directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/?t=json",
                               followRedirect=True))
        def _check_page_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "dirnode")
            children = dict( [(unicode(name),value)
                              in parsed[1]["children"].iteritems()] )
            self.failUnless("new.txt" in children)
            new_json = children["new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
        d.addCallback(_check_page_json)
        # and the JSON form of the file
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=json"))
        def _check_file_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
        d.addCallback(_check_file_json)
        # and look at t=uri and t=readonly-uri
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=uri"))
        d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
        def _check_ro_uri(res):
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(res, ro_uri)
        d.addCallback(_check_ro_uri)
        # make sure we can get to it from /uri/URI
        d.addCallback(lambda res:
                      self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, NEW2_CONTENTS))
        # and that HEAD computes the size correctly
        d.addCallback(lambda res:
                      self.HEAD(self.public_url + "/foo/new.txt",
                                return_response=True))
        def _got_headers((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(NEW2_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got_headers)
        # make sure that size errors are displayed correctly for overwrite
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error,
                                       "test_POST_upload_mutable-toobig",
                                       "413 Request Entity Too Large",
                                       "SDMF is limited to one segment, and 10001 > 10000",
                                       self.public_url + "/foo", t="upload",
                                       "b" * (self.s.MUTABLE_SIZELIMIT+1)),
        d.addErrback(self.dump_error)
    def test_POST_upload_mutable_toobig(self):
        """Oversized mutable upload into a directory must fail with 413."""
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo",
                             t="upload", mutable="true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def dump_error(self, f):
        """addErrback helper: print the HTTP body hidden inside a web error."""
        # if the web server returns an error code (like 400 Bad Request),
        # web.client.getPage puts the HTTP response body into the .response
        # attribute of the exception object that it gives back. It does not
        # appear in the Failure's repr(), so the ERROR that trial displays
        # will be rather terse and unhelpful. addErrback this method to the
        # end of your chain to get more information out of these errors.
        if f.check(error.Error):
            print "web.error.Error:"
            print f.value.response
    def test_POST_upload_replace(self):
        """Uploading over an existing name replaces its contents by default."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_ok(self):
        """replace=false succeeds when the name is not already taken."""
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
        d.addCallback(lambda res: self.failUnlessEqual(res,
                                                       self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_queryarg(self):
        """?replace=false must refuse to overwrite an existing child."""
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldFail, error.Error,
                  "POST_upload_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_no_replace_field(self):
        """replace=false as a form field must refuse to overwrite."""
        d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_whendone(self):
        """when_done= redirects after upload; the child is still linked."""
        d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/THERE")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named(self):
        """Upload with an explicit name= field instead of a file name."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="new.txt", file=self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named_badfilename(self):
        """name= containing a slash is rejected, and nothing is added."""
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_upload_named_badfilename",
                  "name= may not contain a slash",
        # make sure that nothing was added
        d.addCallback(lambda res:
                      self.failUnlessNodeKeysAre(self._foo_node,
                                                 [u"bar.txt", u"blockingfile",
                                                  u"empty", u"n\u00fc.txt",
    def test_POST_FILEURL_check(self):
        """t=check on a file: healthy report, when_done/return_to handling,
        and JSON output."""
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            # _check3: report page links back to the file
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_FILEURL_check_and_repair(self):
        """t=check&repair=true on a healthy file still reports Healthy and
        honors when_done/return_to."""
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check", repair="true")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            # _check3: report page links back to the file
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def test_POST_DIRURL_check(self):
        """t=check on a directory: healthy report, when_done/return_to
        handling, and JSON output."""
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            # _check3: report page links back to the directory
            self.failUnless("Healthy :" in res, res)
            self.failUnless("Return to file/directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_DIRURL_check_and_repair(self):
        """t=check&repair=true on a healthy directory reports Healthy and
        honors when_done/return_to."""
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check", repair="true")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            # _check3: report page links back to the directory
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file/directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def wait_for_operation(self, ignored, ophandle):
        """Poll /operations/<ophandle> (JSON status) until it says finished,
        stalling 1s between polls."""
        url = "/operations/" + ophandle
        url += "?t=status&output=JSON"
            data = simplejson.loads(res)
            if not data["finished"]:
                d = self.stall(delay=1.0)
                d.addCallback(self.wait_for_operation, ophandle)
    def get_operation_results(self, ignored, ophandle, output=None):
        """Fetch /operations/<ophandle> results, parsing JSON when
        output="json" (case-insensitive)."""
        url = "/operations/" + ophandle
            url += "&output=" + output
            if output and output.lower() == "json":
                return simplejson.loads(res)
    def test_POST_DIRURL_deepcheck_no_ophandle(self):
        """start-deep-check is a slow operation, so it must demand ophandle=."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepcheck_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-check")
    def test_POST_DIRURL_deepcheck(self):
        """start-deep-check redirects to /operations/123; then check JSON,
        html, trailing-slash, bogus-SI 404, and per-SI JSON results."""
        def _check_redirect(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.endswith("/operations/123"))
        d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                                 self.POST, self.public_url,
                                 t="start-deep-check", ophandle="123")
        d.addCallback(self.wait_for_operation, "123")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy"], 8)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "123", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy: <span>8</span>" in res)
        d.addCallback(_check_html)
        d.addCallback(lambda res:
                      self.GET("/operations/123/"))
        d.addCallback(_check_html) # should be the same as without the slash
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "one", "404 Not Found",
                                       "No detailed results for SI bogus",
                                       self.GET, "/operations/123/bogus"))
        foo_si = self._foo_node.get_storage_index()
        foo_si_s = base32.b2a(foo_si)
        d.addCallback(lambda res:
                      self.GET("/operations/123/%s?output=JSON" % foo_si_s))
        def _check_foo_json(res):
            data = simplejson.loads(res)
            self.failUnlessEqual(data["storage-index"], foo_si_s)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_foo_json)
    def test_POST_DIRURL_deepcheck_and_repair(self):
        """Deep-check with repair on a healthy tree: every pre/post-repair
        counter stays clean in both JSON and html reports."""
        d = self.POST(self.public_url, t="start-deep-check", repair="true",
                      ophandle="124", output="json", followRedirect=True)
        d.addCallback(self.wait_for_operation, "124")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
            self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
            self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
            self.failUnlessEqual(data["count-repairs-attempted"], 0)
            self.failUnlessEqual(data["count-repairs-successful"], 0)
            self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
            self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
            self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
            self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "124", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
            self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
            self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
            self.failUnless("Repairs Attempted: <span>0</span>" in res)
            self.failUnless("Repairs Successful: <span>0</span>" in res)
            self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
            self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
            self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
            self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
        d.addCallback(_check_html)
    def test_POST_FILEURL_bad_t(self):
        # POSTing an unrecognized t= to a file URL must fail with
        # "400 Bad Request".
        d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                             "POST to file: bad t=bogus",
                             self.POST, self.public_url + "/foo/bar.txt",
        # NOTE(review): listing truncated — the trailing t="bogus" argument
        # and "return d" are missing here.
1893 def test_POST_mkdir(self): # return value?
1894 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1895 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1896 d.addCallback(self.failUnlessNodeKeysAre, [])
1899 def test_POST_mkdir_initial_children(self):
1900 newkids, filecap1, ign, ign, ign = self._create_initial_children()
1901 d = self.POST(self.public_url + "/foo", t="mkdir-with-children",
1902 name="newdir", children=simplejson.dumps(newkids))
1903 d.addCallback(lambda res:
1904 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1905 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1906 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1907 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1908 d.addCallback(self.failUnlessChildURIIs, u"child-imm", filecap1)
1911 def test_POST_mkdir_2(self):
1912 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
1913 d.addCallback(lambda res:
1914 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1915 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1916 d.addCallback(self.failUnlessNodeKeysAre, [])
1919 def test_POST_mkdirs_2(self):
1920 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
1921 d.addCallback(lambda res:
1922 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
1923 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
1924 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
1925 d.addCallback(self.failUnlessNodeKeysAre, [])
1928 def test_POST_mkdir_no_parentdir_noredirect(self):
1929 d = self.POST("/uri?t=mkdir")
1930 def _after_mkdir(res):
1931 uri.DirectoryURI.init_from_string(res)
1932 d.addCallback(_after_mkdir)
1935 def test_POST_mkdir_no_parentdir_redirect(self):
1936 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1937 d.addBoth(self.shouldRedirect, None, statuscode='303')
1938 def _check_target(target):
1939 target = urllib.unquote(target)
1940 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1941 d.addCallback(_check_target)
1944 def _create_initial_children(self):
1945 contents, n, filecap1 = self.makefile(12)
1946 md1 = {"metakey1": "metavalue1"}
1947 filecap2 = make_mutable_file_uri()
1948 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
1949 filecap3 = node3.get_readonly_uri()
1950 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
1951 dircap = DirectoryNode(node4, None, None).get_uri()
1952 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
1953 "metadata": md1, }],
1954 u"child-mutable": ["filenode", {"rw_uri": filecap2}],
1955 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
1956 u"dirchild": ["dirnode", {"rw_uri": dircap}],
1958 return newkids, filecap1, filecap2, filecap3, dircap
    def test_POST_mkdir_no_parentdir_initial_children(self):
        # POST /uri?t=mkdir-with-children creates an unlinked directory
        # pre-populated from the JSON children= body, then we verify each
        # child cap landed correctly.
        (newkids, filecap1, filecap2, filecap3,
         dircap) = self._create_initial_children()
        d = self.POST("/uri?t=mkdir-with-children",
                      children=simplejson.dumps(newkids))
        def _after_mkdir(res):
            self.failUnless(res.startswith("URI:DIR"), res)
            n = self.s.create_node_from_uri(res)
            d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-imm", filecap1))
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-mutable",
            # NOTE(review): the "filecap2))" argument line is missing here.
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"child-mutable-ro",
            # NOTE(review): the "filecap3))" argument line is missing here.
            d2.addCallback(lambda ign:
                           self.failUnlessChildURIIs(n, u"dirchild", dircap))
            # NOTE(review): "return d2" appears to be missing here.
        d.addCallback(_after_mkdir)
        # NOTE(review): "return d" missing from this listing.
    def test_POST_mkdir_no_parentdir_unexpected_children(self):
        # the regular /uri?t=mkdir operation is specified to ignore its body.
        # Only t=mkdir-with-children pays attention to it.
        (newkids, filecap1, filecap2, filecap3,
         dircap) = self._create_initial_children()
        d = self.shouldHTTPError("POST t=mkdir unexpected children",
        # NOTE(review): the expected status/message arguments (e.g. 400,
        # "Bad Request") are missing from the listing at this point.
                                 "t=mkdir does not accept children=, "
                                 "try t=mkdir-with-children instead",
                                 self.POST, "/uri?t=mkdir", # without children
                                 children=simplejson.dumps(newkids))
        # NOTE(review): "return d" missing from this listing.
1996 def test_POST_noparent_bad(self):
1997 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
1998 "/uri accepts only PUT, PUT?t=mkdir, "
1999 "POST?t=upload, and POST?t=mkdir",
2000 self.POST, "/uri?t=bogus")
    def test_welcome_page_mkdir_button(self):
        # Fetch the welcome page, scrape the "Create a directory" form out
        # of it, drive that form, and expect a 303 redirect to the result.
        # NOTE(review): the initial 'd = self.GET(...)' line is missing
        # from this listing.
        def _after_get_welcome_page(res):
            MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
            mo = MKDIR_BUTTON_RE.search(res)
            formaction = mo.group(1)
            # NOTE(review): 'formt = mo.group(2)' appears to be missing here.
            formaname = mo.group(3)
            formavalue = mo.group(4)
            return (formaction, formt, formaname, formavalue)
        d.addCallback(_after_get_welcome_page)
        def _after_parse_form(res):
            (formaction, formt, formaname, formavalue) = res
            return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
        d.addCallback(_after_parse_form)
        d.addBoth(self.shouldRedirect, None, statuscode='303')
        # NOTE(review): "return d" missing from this listing.
2022 def test_POST_mkdir_replace(self): # return value?
2023 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2024 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2025 d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_mkdir_no_replace_queryarg(self): # return value?
        # With ?replace=false, mkdir over an existing child must fail and
        # leave the original "sub" directory (still containing baz.txt).
        d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
        d.addBoth(self.shouldFail, error.Error,
                  "POST_mkdir_no_replace_queryarg",
        # NOTE(review): the expected-status argument line (presumably
        # "409 Conflict",) is missing from this listing.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
        # NOTE(review): "return d" missing from this listing.
    def test_POST_mkdir_no_replace_field(self): # return value?
        # Same as the queryarg variant, but replace=false is passed as a
        # form field instead of a query argument.
        d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
        # NOTE(review): the 'replace="false")' argument line is missing here.
        d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
        # NOTE(review): the expected-status argument line is missing here.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
        # NOTE(review): "return d" missing from this listing.
2050 def test_POST_mkdir_whendone_field(self):
2051 d = self.POST(self.public_url + "/foo",
2052 t="mkdir", name="newdir", when_done="/THERE")
2053 d.addBoth(self.shouldRedirect, "/THERE")
2054 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2055 d.addCallback(self.failUnlessNodeKeysAre, [])
2058 def test_POST_mkdir_whendone_queryarg(self):
2059 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2060 t="mkdir", name="newdir")
2061 d.addBoth(self.shouldRedirect, "/THERE")
2062 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2063 d.addCallback(self.failUnlessNodeKeysAre, [])
2066 def test_POST_bad_t(self):
2067 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2068 "POST to a directory with bad t=BOGUS",
2069 self.POST, self.public_url + "/foo", t="BOGUS")
    def test_POST_set_children(self):
        # t=set_children takes a JSON request body and atomically adds
        # several children to the directory in one operation.
        contents9, n9, newuri9 = self.makefile(9)
        contents10, n10, newuri10 = self.makefile(10)
        contents11, n11, newuri11 = self.makefile(11)
        # NOTE(review): the opening 'reqbody = """{' of the request-body
        # literal, and several of its JSON lines, are missing from this
        # listing.
            "atomic_added_1": [ "filenode", { "rw_uri": "%s",
                "ctime": 1002777696.7564139,
                "mtime": 1002777696.7564139
            "atomic_added_2": [ "filenode", { "rw_uri": "%s",
                "ctime": 1002777696.7564139,
                "mtime": 1002777696.7564139
            "atomic_added_3": [ "filenode", { "rw_uri": "%s",
                "ctime": 1002777696.7564139,
                "mtime": 1002777696.7564139
            }""" % (newuri9, newuri10, newuri11)
        url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
        # NOTE(review): the 'def _then(res):' line is missing below.
        d = client.getPage(url, method="POST", postdata=reqbody)
            self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
            self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
            self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
        d.addCallback(_then)
        d.addErrback(self.dump_error)
        # NOTE(review): "return d" missing from this listing.
    def test_POST_put_uri(self):
        # t=uri attaches an existing filecap to the directory under name=.
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
        # NOTE(review): the trailing "contents))" argument and "return d"
        # are missing from this listing.
    def test_POST_put_uri_replace(self):
        # t=uri with an existing child name replaces the old entry.
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
        # NOTE(review): the trailing "contents))" argument and "return d"
        # are missing from this listing.
    def test_POST_put_uri_no_replace_queryarg(self):
        # ?replace=false: attaching over an existing child must fail and
        # leave the original bar.txt in place.
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo?replace=false", t="uri",
                      name="bar.txt", uri=newuri)
        d.addBoth(self.shouldFail, error.Error,
                  "POST_put_uri_no_replace_queryarg",
        # NOTE(review): the expected-status argument line is missing here.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): "return d" missing from this listing.
    def test_POST_put_uri_no_replace_field(self):
        # Same as the queryarg variant, but replace=false as a form field.
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", replace="false",
                      name="bar.txt", uri=newuri)
        d.addBoth(self.shouldFail, error.Error,
                  "POST_put_uri_no_replace_field",
        # NOTE(review): the expected-status argument line is missing here.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): "return d" missing from this listing.
2157 def test_POST_delete(self):
2158 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2159 d.addCallback(lambda res: self._foo_node.list())
2160 def _check(children):
2161 self.failIf(u"bar.txt" in children)
2162 d.addCallback(_check)
2165 def test_POST_rename_file(self):
2166 d = self.POST(self.public_url + "/foo", t="rename",
2167 from_name="bar.txt", to_name='wibble.txt')
2168 d.addCallback(lambda res:
2169 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2170 d.addCallback(lambda res:
2171 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2172 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2173 d.addCallback(self.failUnlessIsBarDotTxt)
2174 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2175 d.addCallback(self.failUnlessIsBarJSON)
2178 def test_POST_rename_file_redundant(self):
2179 d = self.POST(self.public_url + "/foo", t="rename",
2180 from_name="bar.txt", to_name='bar.txt')
2181 d.addCallback(lambda res:
2182 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2183 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2184 d.addCallback(self.failUnlessIsBarDotTxt)
2185 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2186 d.addCallback(self.failUnlessIsBarJSON)
2189 def test_POST_rename_file_replace(self):
2190 # rename a file and replace a directory with it
2191 d = self.POST(self.public_url + "/foo", t="rename",
2192 from_name="bar.txt", to_name='empty')
2193 d.addCallback(lambda res:
2194 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2195 d.addCallback(lambda res:
2196 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2197 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2198 d.addCallback(self.failUnlessIsBarDotTxt)
2199 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2200 d.addCallback(self.failUnlessIsBarJSON)
    def test_POST_rename_file_no_replace_queryarg(self):
        # rename a file and replace a directory with it
        # (?replace=false must make this fail and leave "empty" untouched)
        d = self.POST(self.public_url + "/foo?replace=false", t="rename",
                      from_name="bar.txt", to_name='empty')
        d.addBoth(self.shouldFail, error.Error,
                  "POST_rename_file_no_replace_queryarg",
        # NOTE(review): the expected-status argument line is missing here.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsEmptyJSON)
        # NOTE(review): "return d" missing from this listing.
    def test_POST_rename_file_no_replace_field(self):
        # rename a file and replace a directory with it
        # (replace=false as a form field must make this fail)
        d = self.POST(self.public_url + "/foo", t="rename", replace="false",
                      from_name="bar.txt", to_name='empty')
        d.addBoth(self.shouldFail, error.Error,
                  "POST_rename_file_no_replace_field",
        # NOTE(review): the expected-status argument line is missing here.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsEmptyJSON)
        # NOTE(review): "return d" missing from this listing.
2229 def failUnlessIsEmptyJSON(self, res):
2230 data = simplejson.loads(res)
2231 self.failUnlessEqual(data[0], "dirnode", data)
2232 self.failUnlessEqual(len(data[1]["children"]), 0)
    def test_POST_rename_file_slash_fail(self):
        # a slash in to_name= must be rejected, leaving bar.txt in place.
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='kirk/spock.txt')
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_rename_file_slash_fail",
        # NOTE(review): the expected-status argument line is missing here.
                  "to_name= may not contain a slash",
        # NOTE(review): the closing argument line of this shouldFail call
        # is missing here.
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
        # NOTE(review): "return d" missing from this listing.
2246 def test_POST_rename_dir(self):
2247 d = self.POST(self.public_url, t="rename",
2248 from_name="foo", to_name='plunk')
2249 d.addCallback(lambda res:
2250 self.failIfNodeHasChild(self.public_root, u"foo"))
2251 d.addCallback(lambda res:
2252 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2253 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2254 d.addCallback(self.failUnlessIsFooJSON)
2257 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2258 """ If target is not None then the redirection has to go to target. If
2259 statuscode is not None then the redirection has to be accomplished with
2260 that HTTP status code."""
2261 if not isinstance(res, failure.Failure):
2262 to_where = (target is None) and "somewhere" or ("to " + target)
2263 self.fail("%s: we were expecting to get redirected %s, not get an"
2264 " actual page: %s" % (which, to_where, res))
2265 res.trap(error.PageRedirect)
2266 if statuscode is not None:
2267 self.failUnlessEqual(res.value.status, statuscode,
2268 "%s: not a redirect" % which)
2269 if target is not None:
2270 # the PageRedirect does not seem to capture the uri= query arg
2271 # properly, so we can't check for it.
2272 realtarget = self.webish_url + target
2273 self.failUnlessEqual(res.value.location, realtarget,
2274 "%s: wrong target" % which)
2275 return res.value.location
    def test_GET_URI_form(self):
        base = "/uri?uri=%s" % self._bar_txt_uri
        # this is supposed to give us a redirect to /uri/$URI, plus arguments
        targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        # NOTE(review): the initial 'd = self.GET(base)' line is missing
        # from this listing.
        d.addBoth(self.shouldRedirect, targetbase)
        d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
        d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
        d.addCallback(lambda res: self.GET(base+"&t=json"))
        d.addBoth(self.shouldRedirect, targetbase+"?t=json")
        d.addCallback(self.log, "about to get file by uri")
        d.addCallback(lambda res: self.GET(base, followRedirect=True))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(self.log, "got file by uri, about to get dir by uri")
        d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
                                           followRedirect=True))
        d.addCallback(self.failUnlessIsFooJSON)
        d.addCallback(self.log, "got dir by uri")
        # NOTE(review): "return d" missing from this listing.
    def test_GET_URI_form_bad(self):
        # GET /uri without a uri= query argument is a 400.
        d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                             "400 Bad Request", "GET /uri requires uri=",
        # NOTE(review): the 'self.GET, "/uri")' argument line and
        # "return d" are missing from this listing.
    def test_GET_rename_form(self):
        # t=rename-form renders an HTML form pre-filled with from_name=.
        d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
                     followRedirect=True)
        # NOTE(review): the 'def _check(res):' line is missing here.
            self.failUnless('name="when_done" value="."' in res, res)
            self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
        d.addCallback(_check)
        # NOTE(review): "return d" missing from this listing.
    def log(self, res, msg):
        # Pass-through logging hook for Deferred callback chains.
        #print "MSG: %s RES: %s" % (msg, res)
        # NOTE(review): this listing is missing the body lines here
        # (presumably a log call and "return res"; as a callback it must
        # return res to keep the chain value flowing).
    def test_GET_URI_URL(self):
        base = "/uri/%s" % self._bar_txt_uri
        # NOTE(review): the 'd = self.GET(base)' line is missing here.
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): "return d" missing from this listing.
    def test_GET_URI_URL_dir(self):
        # fetching a directory cap with t=json returns the dirnode JSON.
        base = "/uri/%s?t=json" % self._foo_uri
        # NOTE(review): the 'd = self.GET(base)' line is missing here.
        d.addCallback(self.failUnlessIsFooJSON)
        # NOTE(review): "return d" missing from this listing.
    def test_GET_URI_URL_missing(self):
        # fetching a cap whose shares are unrecoverable yields http.GONE.
        base = "/uri/%s" % self._bad_file_uri
        d = self.shouldHTTPError("test_GET_URI_URL_missing",
                                 http.GONE, None, "NotEnoughSharesError",
        # NOTE(review): the 'self.GET, base)' argument line is missing here.
        # TODO: how can we exercise both sides of WebDownloadTarget.fail
        # here? we must arrange for a download to fail after target.open()
        # has been called, and then inspect the response to see that it is
        # shorter than we expected.
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_DIRURL_uri(self):
        # PUT ?t=uri onto an existing directory URL replaces that child
        # with the given directory cap.
        d = self.s.create_dirnode()
        # NOTE(review): the 'def _made_dir(dn):' line is missing here.
            new_uri = dn.get_uri()
            # replace /foo with a new (empty) directory
            d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
            d.addCallback(lambda res:
                          self.failUnlessEqual(res.strip(), new_uri))
            d.addCallback(lambda res:
                          self.failUnlessChildURIIs(self.public_root,
            # NOTE(review): the remaining argument lines (u"foo", new_uri))
            # and the inner "return d" are missing here.
        d.addCallback(_made_dir)
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_DIRURL_uri_noreplace(self):
        # With replace=false, PUT ?t=uri over an existing directory child
        # must fail and leave /foo pointing at the original cap.
        d = self.s.create_dirnode()
        # NOTE(review): the 'def _made_dir(dn):' line is missing here.
            new_uri = dn.get_uri()
            # replace /foo with a new (empty) directory, but ask that
            # replace=false, so it should fail
            d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
                                 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
        # NOTE(review): the 'self.PUT,' argument line is missing here.
                                 self.public_url + "/foo?t=uri&replace=false",
        # NOTE(review): the 'new_uri)' argument line is missing here.
            d.addCallback(lambda res:
                          self.failUnlessChildURIIs(self.public_root,
        # NOTE(review): the remaining argument lines and the inner
        # "return d" are missing here.
        d.addCallback(_made_dir)
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_DIRURL_bad_t(self):
        # PUT with an unknown t= to a directory is a 400, and /foo must be
        # left unchanged.
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                             "400 Bad Request", "PUT to a directory",
                             self.PUT, self.public_url + "/foo?t=BOGUS", "")
        d.addCallback(lambda res:
                      self.failUnlessChildURIIs(self.public_root,
        # NOTE(review): the remaining argument lines (u"foo",
        # self._foo_uri)) and "return d" are missing from this listing.
    def test_PUT_NEWFILEURL_uri(self):
        # PUT ?t=uri to a new child name attaches the given filecap.
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
        # NOTE(review): the trailing "contents))" argument and "return d"
        # are missing from this listing.
    def test_PUT_NEWFILEURL_uri_replace(self):
        # PUT ?t=uri over an existing child name replaces the old entry.
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
        # NOTE(review): the trailing "contents))" argument and "return d"
        # are missing from this listing.
    def test_PUT_NEWFILEURL_uri_no_replace(self):
        # With replace=false, PUT ?t=uri over an existing child must fail.
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
        # NOTE(review): the expected-status argument line is missing here.
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_NEWFILE_URI(self):
        # PUT to /uri uploads an unlinked immutable file and returns a cap
        # that can then be fetched back via /uri/$CAP.
        file_contents = "New file contents here\n"
        d = self.PUT("/uri", file_contents)
        # NOTE(review): the 'def _check(uri):' line is missing here.
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
        # NOTE(review): the 'file_contents)' argument line is missing here.
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
        # NOTE(review): the 'def _check2(res):' line is missing here.
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_NEWFILE_URI_not_mutable(self):
        # PUT /uri?mutable=false behaves like the default immutable upload.
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=false", file_contents)
        # NOTE(review): the 'def _check(uri):' line is missing here.
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
        # NOTE(review): the 'file_contents)' argument line is missing here.
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
        # NOTE(review): the 'def _check2(res):' line is missing here.
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_NEWFILE_URI_only_PUT(self):
        # PUT /uri with an unknown t= must be rejected with a message
        # listing the accepted forms.
        d = self.PUT("/uri?t=bogus", "")
        d.addBoth(self.shouldFail, error.Error,
                  "PUT_NEWFILE_URI_only_PUT",
        # NOTE(review): the expected-status argument line is missing here.
                  "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_NEWFILE_URI_mutable(self):
        # PUT /uri?mutable=true creates an unlinked mutable (SSK) file;
        # the returned writecap must round-trip through download and GET.
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=true", file_contents)
        def _check1(filecap):
            filecap = filecap.strip()
            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
            self.filecap = filecap
            u = uri.WriteableSSKFileURI.init_from_string(filecap)
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(filecap)
            return n.download_best_version()
        d.addCallback(_check1)
        # NOTE(review): the 'def _check2(data):' line is missing here.
            self.failUnlessEqual(data, file_contents)
            return self.GET("/uri/%s" % urllib.quote(self.filecap))
        d.addCallback(_check2)
        # NOTE(review): the 'def _check3(res):' line is missing here.
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check3)
        # NOTE(review): "return d" missing from this listing.
    def test_PUT_mkdir(self):
        # PUT /uri?t=mkdir creates an unlinked empty directory whose JSON
        # rendering we then verify.
        d = self.PUT("/uri?t=mkdir", "")
        # NOTE(review): the 'def _check(uri):' line is missing here.
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, [])
            d2.addCallback(lambda res:
                           self.GET("/uri/%s?t=json" % uri))
            # NOTE(review): "return d2" appears to be missing here.
        d.addCallback(_check)
        d.addCallback(self.failUnlessIsEmptyJSON)
        # NOTE(review): "return d" missing from this listing.
    def test_POST_check(self):
        # t=check on a child file triggers a checker run on it.
        d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
        # NOTE(review): the 'def _done(res):' line is missing here.
            # this returns a string form of the results, which are probably
            # None since we're using fake filenodes.
            # TODO: verify that the check actually happened, by changing
            # FakeCHKFileNode to count how many times .check() has been
            # NOTE(review): the rest of this comment/body is missing here.
        d.addCallback(_done)
        # NOTE(review): "return d" missing from this listing.
2500 def test_bad_method(self):
2501 url = self.webish_url + self.public_url + "/foo/bar.txt"
2502 d = self.shouldHTTPError("test_bad_method",
2503 501, "Not Implemented",
2504 "I don't know how to treat a BOGUS request.",
2505 client.getPage, url, method="BOGUS")
2508 def test_short_url(self):
2509 url = self.webish_url + "/uri"
2510 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2511 "I don't know how to treat a DELETE request.",
2512 client.getPage, url, method="DELETE")
2515 def test_ophandle_bad(self):
2516 url = self.webish_url + "/operations/bogus?t=status"
2517 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2518 "unknown/expired handle 'bogus'",
2519 client.getPage, url)
    def test_ophandle_cancel(self):
        # Start a manifest operation under ophandle 128, cancel it via
        # t=cancel, then confirm the handle has been forgotten (404).
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/128?t=status&output=JSON"))
        # NOTE(review): the 'def _check1(res):' line is missing here.
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            monitor = self.ws.root.child_operations.handles["128"][0]
            d = self.POST("/operations/128?t=cancel&output=JSON")
            # NOTE(review): the 'def _check2(res):' line is missing here.
                data = simplejson.loads(res)
                self.failUnless("finished" in data, res)
                # t=cancel causes the handle to be forgotten
                self.failUnless(monitor.is_cancelled())
            d.addCallback(_check2)
            # NOTE(review): the inner "return d" appears to be missing here.
        d.addCallback(_check1)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_cancel",
                                           404, "404 Not Found",
                                           "unknown/expired handle '128'",
        # NOTE(review): the 'self.GET,' argument line is missing here.
                                           "/operations/128?t=status&output=JSON"))
        # NOTE(review): "return d" missing from this listing.
    def test_ophandle_retainfor(self):
        # retain-for=0 on the status fetch expires the handle almost
        # immediately; a later query must get a 404.
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
        # NOTE(review): the 'def _check1(res):' line is missing here.
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # the retain-for=0 will cause the handle to be expired very soon
        d.addCallback(self.stall, 2.0)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_retainfor",
                                           404, "404 Not Found",
                                           "unknown/expired handle '129'",
        # NOTE(review): the 'self.GET,' argument line is missing here.
                                           "/operations/129?t=status&output=JSON"))
        # NOTE(review): "return d" missing from this listing.
    def test_ophandle_release_after_complete(self):
        # Fetching the status with release-after-complete=true expires the
        # handle once the operation is done; a later query must 404.
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "130")
        d.addCallback(lambda ignored:
                      self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
        # the release-after-complete=true will cause the handle to be expired
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_release_after_complete",
                                           404, "404 Not Found",
                                           "unknown/expired handle '130'",
        # NOTE(review): the 'self.GET,' argument line is missing here.
                                           "/operations/130?t=status&output=JSON"))
        # NOTE(review): "return d" missing from this listing.
    def test_incident(self):
        # POSTing to /report_incident renders a thank-you page.
        d = self.POST("/report_incident", details="eek")
        # NOTE(review): the 'def _done(res):' line is missing here.
            self.failUnless("Thank you for your report!" in res, res)
        d.addCallback(_done)
        # NOTE(review): "return d" missing from this listing.
    def test_static(self):
        # Files placed under the configured static dir are served at
        # /static/...
        webdir = os.path.join(self.staticdir, "subdir")
        fileutil.make_dirs(webdir)
        f = open(os.path.join(webdir, "hello.txt"), "wb")
        # NOTE(review): the f.write("hello") / f.close() lines are missing
        # from this listing.
        d = self.GET("/static/subdir/hello.txt")
        # NOTE(review): the 'def _check(res):' line is missing here.
            self.failUnlessEqual(res, "hello")
        d.addCallback(_check)
        # NOTE(review): "return d" missing from this listing.
class Util(unittest.TestCase, ShouldFailMixin):
    """Unit tests for the small web-formatting helpers
    (common.parse_replace_arg, common.abbreviate_*, status.plural)."""

    def test_parse_replace_arg(self):
        self.failUnlessEqual(common.parse_replace_arg("true"), True)
        self.failUnlessEqual(common.parse_replace_arg("false"), False)
        self.failUnlessEqual(common.parse_replace_arg("only-files"),
        # NOTE(review): the expected-value argument line is missing here.
        # anything other than the recognized strings must be rejected:
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        # abbreviate_time picks s/ms/us units; None renders as "".
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        # abbreviate_rate uses decimal (1000-based) Bps units; None -> "".
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        # abbreviate_size uses decimal (1000-based) byte units; None -> "".
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        # NOTE(review): the 'def convert(s):' line is missing here.
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
        # NOTE(review): the 'def convert2(s):' line is missing here.
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2645 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
2647 def CHECK(self, ign, which, args, clientnum=0):
2648 fileurl = self.fileurls[which]
2649 url = fileurl + "?" + args
2650 return self.GET(url, method="POST", clientnum=clientnum)
    def test_filecheck(self):
        # Upload five files — healthy ("good"), missing one share ("sick"),
        # only one share left ("dead"), a mutable file whose share we
        # corrupt ("corrupt"), and a literal file ("small") — then exercise
        # t=check against each, in both HTML and JSON output.
        self.basedir = "web/Grid/filecheck"
        # NOTE(review): this listing is missing interleaved lines throughout
        # this method (grid setup, DATA / self.uris / self.fileurls
        # initialization, the corrupt_share() call, and the final
        # "return d"); individual gaps are flagged below.
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data("literal", convergence="")))
        d.addCallback(_stash_uri, "small")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2]) # one missing share -> "sick"
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2]) # keep only share 0 -> "dead"
            c_shares = self.find_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
            # NOTE(review): the 'corrupt_share(cso)' call is missing here.
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
        def _got_html_good_return_to(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        # NOTE(review): the continuation line closing this failUnless is
        # missing here.
        d.addCallback(_got_html_good_return_to)
        d.addCallback(self.CHECK, "good", "t=check&output=json")
        def _got_json_good(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"], "Healthy")
            self.failUnless(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_good)

        d.addCallback(self.CHECK, "small", "t=check")
        def _got_html_small(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_small)
        d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
        def _got_html_small_return_to(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        # NOTE(review): the continuation line closing this failUnless is
        # missing here.
        d.addCallback(_got_html_small_return_to)
        d.addCallback(self.CHECK, "small", "t=check&output=json")
        def _got_json_small(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_small)

        d.addCallback(self.CHECK, "sick", "t=check")
        def _got_html_sick(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_sick)
        d.addCallback(self.CHECK, "sick", "t=check&output=json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_sick)

        d.addCallback(self.CHECK, "dead", "t=check")
        def _got_html_dead(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_dead)
        d.addCallback(self.CHECK, "dead", "t=check&output=json")
        def _got_json_dead(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 1 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failIf(r["results"]["recoverable"])
        d.addCallback(_got_json_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
        def _got_html_corrupt(res):
            self.failUnless("Not Healthy! : Unhealthy" in res, res)
        d.addCallback(_got_html_corrupt)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
        def _got_json_corrupt(res):
            r = simplejson.loads(res)
            self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
        # NOTE(review): the continuation line closing this failUnless is
        # missing here.
            self.failIf(r["results"]["healthy"])
            self.failUnless(r["results"]["recoverable"])
            self.failUnlessEqual(r["results"]["count-shares-good"], 9)
            self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
        d.addCallback(_got_json_corrupt)

        d.addErrback(self.explain_web_error)
        # NOTE(review): "return d" missing from this listing.
# Exercise "t=check&repair=true" through the webapi against files in four
# health states -- "good" (all 10 shares), "sick" (1 share deleted),
# "dead" (9 of 10 shares deleted), "corrupt" (1 share corrupted) -- and
# assert on the HTML the checker/repairer renders for each.
# NOTE(review): this listing has gaps in its embedded line numbering
# (e.g. 2786, 2788-2789, 2824, 2826 are absent), so setup such as grid/DATA
# initialization and the corrupt_share() call are not visible here.
2784 def test_repair_html(self):
2785 self.basedir = "web/Grid/repair_html"
2787 c0 = self.g.clients[0]
# Upload three immutable files and one mutable file, stashing each URI
# under a name describing the state it will be damaged into.
2790 d = c0.upload(upload.Data(DATA, convergence=""))
2791 def _stash_uri(ur, which):
2792 self.uris[which] = ur.uri
2793 d.addCallback(_stash_uri, "good")
2794 d.addCallback(lambda ign:
2795 c0.upload(upload.Data(DATA+"1", convergence="")))
2796 d.addCallback(_stash_uri, "sick")
2797 d.addCallback(lambda ign:
2798 c0.upload(upload.Data(DATA+"2", convergence="")))
2799 d.addCallback(_stash_uri, "dead")
2800 def _stash_mutable_uri(n, which):
2801 self.uris[which] = n.get_uri()
2802 assert isinstance(self.uris[which], str)
2803 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2804 d.addCallback(_stash_mutable_uri, "corrupt")
2806 def _compute_fileurls(ignored):
2808 for which in self.uris:
2809 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2810 d.addCallback(_compute_fileurls)
# Damage the files: delete one "sick" share, delete nine "dead" shares,
# and prepare a CorruptShareOptions pointing at a "corrupt" share.
2812 def _clobber_shares(ignored):
2813 good_shares = self.find_shares(self.uris["good"])
2814 self.failUnlessEqual(len(good_shares), 10)
2815 sick_shares = self.find_shares(self.uris["sick"])
2816 os.unlink(sick_shares[0][2])
2817 dead_shares = self.find_shares(self.uris["dead"])
2818 for i in range(1, 10):
2819 os.unlink(dead_shares[i][2])
2820 c_shares = self.find_shares(self.uris["corrupt"])
2821 cso = CorruptShareOptions()
2822 cso.stdout = StringIO()
2823 cso.parseOptions([c_shares[0][2]])
2825 d.addCallback(_clobber_shares)
2827 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2828 def _got_html_good(res):
2829 self.failUnless("Healthy" in res, res)
2830 self.failIf("Not Healthy" in res, res)
2831 self.failUnless("No repair necessary" in res, res)
2832 d.addCallback(_got_html_good)
2834 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2835 def _got_html_sick(res):
2836 self.failUnless("Healthy : healthy" in res, res)
2837 self.failIf("Not Healthy" in res, res)
2838 self.failUnless("Repair successful" in res, res)
2839 d.addCallback(_got_html_sick)
2841 # repair of a dead file will fail, of course, but it isn't yet
2842 # clear how this should be reported. Right now it shows up as
2845 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2846 #def _got_html_dead(res):
2848 # self.failUnless("Healthy : healthy" in res, res)
2849 # self.failIf("Not Healthy" in res, res)
2850 # self.failUnless("No repair necessary" in res, res)
2851 #d.addCallback(_got_html_dead)
2853 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2854 def _got_html_corrupt(res):
2855 self.failUnless("Healthy : Healthy" in res, res)
2856 self.failIf("Not Healthy" in res, res)
2857 self.failUnless("Repair successful" in res, res)
2858 d.addCallback(_got_html_corrupt)
2860 d.addErrback(self.explain_web_error)
# Same sick-file repair scenario as test_repair_html, but requests
# "output=json" and asserts on the decoded check-and-repair results:
# repair attempted and successful, pre-repair unhealthy (9 of 10 shares),
# post-repair healthy.
# NOTE(review): embedded line numbers skip 2865/2867-2868/2873 etc., so
# grid setup and the uris/fileurls dict initialization are not visible here.
2863 def test_repair_json(self):
2864 self.basedir = "web/Grid/repair_json"
2866 c0 = self.g.clients[0]
2869 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2870 def _stash_uri(ur, which):
2871 self.uris[which] = ur.uri
2872 d.addCallback(_stash_uri, "sick")
2874 def _compute_fileurls(ignored):
2876 for which in self.uris:
2877 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2878 d.addCallback(_compute_fileurls)
# Make the file "sick" by deleting exactly one of its shares.
2880 def _clobber_shares(ignored):
2881 sick_shares = self.find_shares(self.uris["sick"])
2882 os.unlink(sick_shares[0][2])
2883 d.addCallback(_clobber_shares)
2885 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2886 def _got_json_sick(res):
2887 r = simplejson.loads(res)
2888 self.failUnlessEqual(r["repair-attempted"], True)
2889 self.failUnlessEqual(r["repair-successful"], True)
2890 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2891 "Not Healthy: 9 shares (enc 3-of-10)")
2892 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2893 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2894 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2895 d.addCallback(_got_json_sick)
2897 d.addErrback(self.explain_web_error)
# Verify that the web frontend tolerates UnknownNode children (caps from
# "the future" that this client cannot interpret): directory HTML listing,
# ?t=json child rendering, and ?t=info pages must all work, for both the
# read-write and read-only views of the directory.
# NOTE(review): the listing is missing interior lines (e.g. 2911, 2916,
# 2937/2939 -- presumably the safe="" argument, self.rootnode assignment,
# and the expect_writecap branch), so the conditionals around rw_uri
# checks are not fully visible here.
2900 def test_unknown(self):
2901 self.basedir = "web/Grid/unknown"
2903 c0 = self.g.clients[0]
2907 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
2908 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
2909 # the future cap format may contain slashes, which must be tolerated
2910 expected_info_url = "uri/%s?t=info" % urllib.quote(future_writecap,
2912 future_node = UnknownNode(future_writecap, future_readcap)
2914 d = c0.create_dirnode()
2915 def _stash_root_and_create_file(n):
2917 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
2918 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
2919 return self.rootnode.set_node(u"future", future_node)
2920 d.addCallback(_stash_root_and_create_file)
2921 # make sure directory listing tolerates unknown nodes
2922 d.addCallback(lambda ign: self.GET(self.rooturl))
2923 def _check_html(res):
2924 self.failUnlessIn("<td>future</td>", res)
2925 # find the More Info link for "future", should be relative
2926 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
2927 info_url = mo.group(1)
2928 self.failUnlessEqual(info_url, "future?t=info")
2930 d.addCallback(_check_html)
2931 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
# JSON view: the unknown child must be typed "unknown" and expose its
# ro_uri (and rw_uri only in the writeable view) plus metadata.
2932 def _check_json(res, expect_writecap):
2933 data = simplejson.loads(res)
2934 self.failUnlessEqual(data[0], "dirnode")
2935 f = data[1]["children"]["future"]
2936 self.failUnlessEqual(f[0], "unknown")
2938 self.failUnlessEqual(f[1]["rw_uri"], future_writecap)
2940 self.failIfIn("rw_uri", f[1])
2941 self.failUnlessEqual(f[1]["ro_uri"], future_readcap)
2942 self.failUnless("metadata" in f[1])
2943 d.addCallback(_check_json, expect_writecap=True)
2944 d.addCallback(lambda ign: self.GET(expected_info_url))
# Info page: unknown objects get no data/check/mutate/directory controls.
2945 def _check_info(res, expect_readcap):
2946 self.failUnlessIn("Object Type: <span>unknown</span>", res)
2947 self.failUnlessIn(future_writecap, res)
2949 self.failUnlessIn(future_readcap, res)
2950 self.failIfIn("Raw data as", res)
2951 self.failIfIn("Directory writecap", res)
2952 self.failIfIn("Checker Operations", res)
2953 self.failIfIn("Mutable File Operations", res)
2954 self.failIfIn("Directory Operations", res)
2955 d.addCallback(_check_info, expect_readcap=False)
2956 d.addCallback(lambda ign: self.GET(self.rooturl+"future?t=info"))
2957 d.addCallback(_check_info, expect_readcap=True)
2959 # and make sure that a read-only version of the directory can be
2960 # rendered too. This version will not have future_writecap
2961 d.addCallback(lambda ign: self.GET(self.rourl))
2962 d.addCallback(_check_html)
2963 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
2964 d.addCallback(_check_json, expect_writecap=False)
# Exercise the streaming deep-check and stream-manifest webapi operations
# over a directory tree containing a good CHK file, a literal file, a
# "sick" file (2 shares deleted), and an UnknownNode, then make a subtree
# unrecoverable and verify that both streams report "ERROR:" followed by
# a traceback instead of JSON.
# NOTE(review): the embedded numbering has gaps (2969, 2971-2973, 2976,
# 3013-3014, 3024, 3094, 3115, ...), so per-method setup, several `def`
# lines (e.g. _done), and some bracketed expressions are not visible here.
2967 def test_deep_check(self):
2968 self.basedir = "web/Grid/deep_check"
2970 c0 = self.g.clients[0]
2974 d = c0.create_dirnode()
2975 def _stash_root_and_create_file(n):
2977 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2978 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2979 d.addCallback(_stash_root_and_create_file)
2980 def _stash_uri(fn, which):
2981 self.uris[which] = fn.get_uri()
2983 d.addCallback(_stash_uri, "good")
2984 d.addCallback(lambda ign:
2985 self.rootnode.add_file(u"small",
2986 upload.Data("literal",
2988 d.addCallback(_stash_uri, "small")
2989 d.addCallback(lambda ign:
2990 self.rootnode.add_file(u"sick",
2991 upload.Data(DATA+"1",
2993 d.addCallback(_stash_uri, "sick")
2995 # this tests that deep-check and stream-manifest will ignore
2996 # UnknownNode instances. Hopefully this will also cover deep-stats.
2997 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
2998 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
2999 future_node = UnknownNode(future_writecap, future_readcap)
3000 d.addCallback(lambda ign: self.rootnode.set_node(u"future",future_node))
3002 def _clobber_shares(ignored):
3003 self.delete_shares_numbered(self.uris["sick"], [0,1])
3004 d.addCallback(_clobber_shares)
3012 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# Each line of the streamed response is one JSON unit; dump the response
# on decode failure to aid debugging.
3015 units = [simplejson.loads(line)
3016 for line in res.splitlines()
3019 print "response is:", res
3020 print "undecodeable line was '%s'" % line
# 5 nodes (root, good, small, sick, future) plus the trailing stats unit.
3022 self.failUnlessEqual(len(units), 5+1)
3023 # should be parent-first
3025 self.failUnlessEqual(u0["path"], [])
3026 self.failUnlessEqual(u0["type"], "directory")
3027 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3028 u0cr = u0["check-results"]
3029 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
3031 ugood = [u for u in units
3032 if u["type"] == "file" and u["path"] == [u"good"]][0]
3033 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3034 ugoodcr = ugood["check-results"]
3035 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
3038 self.failUnlessEqual(stats["type"], "stats")
3040 self.failUnlessEqual(s["count-immutable-files"], 2)
3041 self.failUnlessEqual(s["count-literal-files"], 1)
3042 self.failUnlessEqual(s["count-directories"], 1)
3043 self.failUnlessEqual(s["count-unknown"], 1)
3044 d.addCallback(_done)
3046 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3047 def _check_manifest(res):
3048 self.failUnless(res.endswith("\n"))
3049 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3050 self.failUnlessEqual(len(units), 5+1)
3051 self.failUnlessEqual(units[-1]["type"], "stats")
3053 self.failUnlessEqual(first["path"], [])
3054 self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
3055 self.failUnlessEqual(first["type"], "directory")
3056 stats = units[-1]["stats"]
3057 self.failUnlessEqual(stats["count-immutable-files"], 2)
3058 self.failUnlessEqual(stats["count-literal-files"], 1)
3059 self.failUnlessEqual(stats["count-mutable-files"], 0)
# NOTE(review): count-immutable-files is asserted twice (3057 and 3060);
# the second looks like a copy/paste leftover -- confirm upstream.
3060 self.failUnlessEqual(stats["count-immutable-files"], 2)
3061 self.failUnlessEqual(stats["count-unknown"], 1)
3062 d.addCallback(_check_manifest)
3064 # now add root/subdir and root/subdir/grandchild, then make subdir
3065 # unrecoverable, then see what happens
3067 d.addCallback(lambda ign:
3068 self.rootnode.create_subdirectory(u"subdir"))
3069 d.addCallback(_stash_uri, "subdir")
3070 d.addCallback(lambda subdir_node:
3071 subdir_node.add_file(u"grandchild",
3072 upload.Data(DATA+"2",
3074 d.addCallback(_stash_uri, "grandchild")
3076 d.addCallback(lambda ign:
3077 self.delete_shares_numbered(self.uris["subdir"],
3085 # root/subdir [unrecoverable]
3086 # root/subdir/grandchild
3088 # how should a streaming-JSON API indicate fatal error?
3089 # answer: emit ERROR: instead of a JSON string
3091 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3092 def _check_broken_manifest(res):
3093 lines = res.splitlines()
3095 for (i,line) in enumerate(lines)
3096 if line.startswith("ERROR:")]
3098 self.fail("no ERROR: in output: %s" % (res,))
# Everything before the first ERROR: line must still be valid JSON units;
# everything after it is the traceback text.
3099 first_error = error_lines[0]
3100 error_line = lines[first_error]
3101 error_msg = lines[first_error+1:]
3102 error_msg_s = "\n".join(error_msg) + "\n"
3103 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3105 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3106 units = [simplejson.loads(line) for line in lines[:first_error]]
3107 self.failUnlessEqual(len(units), 6) # includes subdir
3108 last_unit = units[-1]
3109 self.failUnlessEqual(last_unit["path"], ["subdir"])
3110 d.addCallback(_check_broken_manifest)
3112 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# Same ERROR: framing for deep-check, plus assertions on the
# unrecoverable subdir's check-results.
3113 def _check_broken_deepcheck(res):
3114 lines = res.splitlines()
3116 for (i,line) in enumerate(lines)
3117 if line.startswith("ERROR:")]
3119 self.fail("no ERROR: in output: %s" % (res,))
3120 first_error = error_lines[0]
3121 error_line = lines[first_error]
3122 error_msg = lines[first_error+1:]
3123 error_msg_s = "\n".join(error_msg) + "\n"
3124 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3126 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3127 units = [simplejson.loads(line) for line in lines[:first_error]]
3128 self.failUnlessEqual(len(units), 6) # includes subdir
3129 last_unit = units[-1]
3130 self.failUnlessEqual(last_unit["path"], ["subdir"])
3131 r = last_unit["check-results"]["results"]
3132 self.failUnlessEqual(r["count-recoverable-versions"], 0)
3133 self.failUnlessEqual(r["count-shares-good"], 1)
3134 self.failUnlessEqual(r["recoverable"], False)
3135 d.addCallback(_check_broken_deepcheck)
3137 d.addErrback(self.explain_web_error)
# Exercise "t=stream-deep-check&repair=true" over a tree with a healthy
# root/good file, a literal root/small file, and a root/sick file (one
# share deleted), asserting that only the sick file gets repaired.
# "dead" and "corrupt" variants are commented out pending repairer support.
# NOTE(review): the embedded numbering has gaps (3142, 3144-3146, 3149,
# 3198, 3204, 3228, 3230, 3235, ...), so setup, the _done `def` line, and
# the u0/stats/s bindings are not visible here.
3140 def test_deep_check_and_repair(self):
3141 self.basedir = "web/Grid/deep_check_and_repair"
3143 c0 = self.g.clients[0]
3147 d = c0.create_dirnode()
3148 def _stash_root_and_create_file(n):
3150 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3151 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3152 d.addCallback(_stash_root_and_create_file)
3153 def _stash_uri(fn, which):
3154 self.uris[which] = fn.get_uri()
3155 d.addCallback(_stash_uri, "good")
3156 d.addCallback(lambda ign:
3157 self.rootnode.add_file(u"small",
3158 upload.Data("literal",
3160 d.addCallback(_stash_uri, "small")
3161 d.addCallback(lambda ign:
3162 self.rootnode.add_file(u"sick",
3163 upload.Data(DATA+"1",
3165 d.addCallback(_stash_uri, "sick")
3166 #d.addCallback(lambda ign:
3167 # self.rootnode.add_file(u"dead",
3168 # upload.Data(DATA+"2",
3170 #d.addCallback(_stash_uri, "dead")
3172 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3173 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3174 #d.addCallback(_stash_uri, "corrupt")
3176 def _clobber_shares(ignored):
3177 good_shares = self.find_shares(self.uris["good"])
3178 self.failUnlessEqual(len(good_shares), 10)
3179 sick_shares = self.find_shares(self.uris["sick"])
3180 os.unlink(sick_shares[0][2])
3181 #dead_shares = self.find_shares(self.uris["dead"])
3182 #for i in range(1, 10):
3183 # os.unlink(dead_shares[i][2])
3185 #c_shares = self.find_shares(self.uris["corrupt"])
3186 #cso = CorruptShareOptions()
3187 #cso.stdout = StringIO()
3188 #cso.parseOptions([c_shares[0][2]])
3190 d.addCallback(_clobber_shares)
3193 # root/good CHK, 10 shares
3195 # root/sick CHK, 9 shares
3197 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
3199 units = [simplejson.loads(line)
3200 for line in res.splitlines()
# 4 nodes (root, good, small, sick) plus the trailing stats unit.
3202 self.failUnlessEqual(len(units), 4+1)
3203 # should be parent-first
3205 self.failUnlessEqual(u0["path"], [])
3206 self.failUnlessEqual(u0["type"], "directory")
3207 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3208 u0crr = u0["check-and-repair-results"]
3209 self.failUnlessEqual(u0crr["repair-attempted"], False)
3210 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3212 ugood = [u for u in units
3213 if u["type"] == "file" and u["path"] == [u"good"]][0]
3214 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3215 ugoodcrr = ugood["check-and-repair-results"]
# NOTE(review): the next two asserts re-test u0crr; ugoodcrr (line 3215)
# is assigned but never used. Almost certainly these should read
# ugoodcrr -- flagged only, since surrounding lines are missing here.
3216 self.failUnlessEqual(u0crr["repair-attempted"], False)
3217 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3219 usick = [u for u in units
3220 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3221 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3222 usickcrr = usick["check-and-repair-results"]
3223 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3224 self.failUnlessEqual(usickcrr["repair-successful"], True)
3225 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3226 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3229 self.failUnlessEqual(stats["type"], "stats")
3231 self.failUnlessEqual(s["count-immutable-files"], 2)
3232 self.failUnlessEqual(s["count-literal-files"], 1)
3233 self.failUnlessEqual(s["count-directories"], 1)
3234 d.addCallback(_done)
3236 d.addErrback(self.explain_web_error)
# Deferred-chain helper: for each share of self.uris[which], open the
# share file and count its leases, collecting (share_filename, num_leases)
# pairs for _assert_leasecount. `ignored` absorbs the previous callback's
# result.
# NOTE(review): the embedded numbering skips 3242 and 3247, so the
# initialization and return of `lease_counts` are not visible here.
3239 def _count_leases(self, ignored, which):
3240 u = self.uris[which]
3241 shares = self.find_shares(u)
3243 for shnum, serverid, fn in shares:
3244 sf = get_share_file(fn)
3245 num_leases = len(list(sf.get_leases()))
3246 lease_counts.append( (fn, num_leases) )
def _assert_leasecount(self, lease_counts, expected):
    """Fail the test unless every share carries exactly `expected` leases.

    `lease_counts` is a sequence of (share_filename, num_leases) pairs,
    as collected by _count_leases.
    """
    mismatches = [(fn, count) for (fn, count) in lease_counts
                  if count != expected]
    for (fn, count) in mismatches:
        # self.fail raises immediately, so only the first mismatch reports.
        self.fail("expected %d leases, have %d, on %s" %
                  (expected, count, fn))
# Verify "add-lease=true" behavior on plain t=check: a check from the
# original client merely renews its existing lease (count stays 1), while
# a check from a second client (different lease secrets) adds a second
# lease -- for both an immutable file and a mutable file.
# NOTE(review): the embedded numbering has gaps (3259-3260, 3273, 3275,
# 3334, 3343, ...), so DATA/uris/fileurls setup, the clientnum argument of
# the second mutable CHECK, and the final `return d` are not visible here.
3255 def test_add_lease(self):
3256 self.basedir = "web/Grid/add_lease"
3257 self.set_up_grid(num_clients=2)
3258 c0 = self.g.clients[0]
3261 d = c0.upload(upload.Data(DATA, convergence=""))
3262 def _stash_uri(ur, which):
3263 self.uris[which] = ur.uri
3264 d.addCallback(_stash_uri, "one")
3265 d.addCallback(lambda ign:
3266 c0.upload(upload.Data(DATA+"1", convergence="")))
3267 d.addCallback(_stash_uri, "two")
3268 def _stash_mutable_uri(n, which):
3269 self.uris[which] = n.get_uri()
3270 assert isinstance(self.uris[which], str)
3271 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3272 d.addCallback(_stash_mutable_uri, "mutable")
3274 def _compute_fileurls(ignored):
3276 for which in self.uris:
3277 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3278 d.addCallback(_compute_fileurls)
# Baseline: every file starts with exactly one lease.
3280 d.addCallback(self._count_leases, "one")
3281 d.addCallback(self._assert_leasecount, 1)
3282 d.addCallback(self._count_leases, "two")
3283 d.addCallback(self._assert_leasecount, 1)
3284 d.addCallback(self._count_leases, "mutable")
3285 d.addCallback(self._assert_leasecount, 1)
3287 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3288 def _got_html_good(res):
3289 self.failUnless("Healthy" in res, res)
3290 self.failIf("Not Healthy" in res, res)
3291 d.addCallback(_got_html_good)
# A plain check must not change any lease counts.
3293 d.addCallback(self._count_leases, "one")
3294 d.addCallback(self._assert_leasecount, 1)
3295 d.addCallback(self._count_leases, "two")
3296 d.addCallback(self._assert_leasecount, 1)
3297 d.addCallback(self._count_leases, "mutable")
3298 d.addCallback(self._assert_leasecount, 1)
3300 # this CHECK uses the original client, which uses the same
3301 # lease-secrets, so it will just renew the original lease
3302 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3303 d.addCallback(_got_html_good)
3305 d.addCallback(self._count_leases, "one")
3306 d.addCallback(self._assert_leasecount, 1)
3307 d.addCallback(self._count_leases, "two")
3308 d.addCallback(self._assert_leasecount, 1)
3309 d.addCallback(self._count_leases, "mutable")
3310 d.addCallback(self._assert_leasecount, 1)
3312 # this CHECK uses an alternate client, which adds a second lease
3313 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3314 d.addCallback(_got_html_good)
3316 d.addCallback(self._count_leases, "one")
3317 d.addCallback(self._assert_leasecount, 2)
3318 d.addCallback(self._count_leases, "two")
3319 d.addCallback(self._assert_leasecount, 1)
3320 d.addCallback(self._count_leases, "mutable")
3321 d.addCallback(self._assert_leasecount, 1)
# Same renew-vs-add distinction for the mutable file.
3323 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3324 d.addCallback(_got_html_good)
3326 d.addCallback(self._count_leases, "one")
3327 d.addCallback(self._assert_leasecount, 2)
3328 d.addCallback(self._count_leases, "two")
3329 d.addCallback(self._assert_leasecount, 1)
3330 d.addCallback(self._count_leases, "mutable")
3331 d.addCallback(self._assert_leasecount, 1)
3333 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3335 d.addCallback(_got_html_good)
3337 d.addCallback(self._count_leases, "one")
3338 d.addCallback(self._assert_leasecount, 2)
3339 d.addCallback(self._count_leases, "two")
3340 d.addCallback(self._assert_leasecount, 1)
3341 d.addCallback(self._count_leases, "mutable")
3342 d.addCallback(self._assert_leasecount, 2)
3344 d.addErrback(self.explain_web_error)
# Same lease-renew-vs-add semantics as test_add_lease, but driven through
# "t=stream-deep-check&add-lease=true" over a directory tree (root, one,
# small, mutable): same-client deep-check keeps counts at 1, a second
# client's deep-check bumps them to 2.
# NOTE(review): the embedded numbering has gaps (3351-3353, 3356, 3375,
# 3401, ...), so DATA setup, a line of _stash_root_and_create_file, the
# _done `def` line, and the clientnum argument are not visible here.
3347 def test_deep_add_lease(self):
3348 self.basedir = "web/Grid/deep_add_lease"
3349 self.set_up_grid(num_clients=2)
3350 c0 = self.g.clients[0]
3354 d = c0.create_dirnode()
3355 def _stash_root_and_create_file(n):
3357 self.uris["root"] = n.get_uri()
3358 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3359 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3360 d.addCallback(_stash_root_and_create_file)
3361 def _stash_uri(fn, which):
3362 self.uris[which] = fn.get_uri()
3363 d.addCallback(_stash_uri, "one")
3364 d.addCallback(lambda ign:
3365 self.rootnode.add_file(u"small",
3366 upload.Data("literal",
3368 d.addCallback(_stash_uri, "small")
3370 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3371 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3372 d.addCallback(_stash_uri, "mutable")
3374 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
3376 units = [simplejson.loads(line)
3377 for line in res.splitlines()
3379 # root, one, small, mutable, stats
3380 self.failUnlessEqual(len(units), 4+1)
3381 d.addCallback(_done)
# Baseline: one lease everywhere after a no-add-lease deep-check.
3383 d.addCallback(self._count_leases, "root")
3384 d.addCallback(self._assert_leasecount, 1)
3385 d.addCallback(self._count_leases, "one")
3386 d.addCallback(self._assert_leasecount, 1)
3387 d.addCallback(self._count_leases, "mutable")
3388 d.addCallback(self._assert_leasecount, 1)
# Same client adds no new lease (same lease secrets => renew only).
3390 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3391 d.addCallback(_done)
3393 d.addCallback(self._count_leases, "root")
3394 d.addCallback(self._assert_leasecount, 1)
3395 d.addCallback(self._count_leases, "one")
3396 d.addCallback(self._assert_leasecount, 1)
3397 d.addCallback(self._count_leases, "mutable")
3398 d.addCallback(self._assert_leasecount, 1)
# Alternate client (different lease secrets) adds a second lease on
# every node in the tree.
3400 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3402 d.addCallback(_done)
3404 d.addCallback(self._count_leases, "root")
3405 d.addCallback(self._assert_leasecount, 2)
3406 d.addCallback(self._count_leases, "one")
3407 d.addCallback(self._assert_leasecount, 2)
3408 d.addCallback(self._count_leases, "mutable")
3409 d.addCallback(self._assert_leasecount, 2)
3411 d.addErrback(self.explain_web_error)
# Verify webapi error reporting: 410 Gone with a text/plain explanation
# for NoSharesError / NotEnoughSharesError / UnrecoverableFileError, 404
# for a missing child, degraded-but-rendered HTML for unrecoverable
# directories, and 500 pages (HTML or plain traceback depending on the
# Accept header) for an unhandled server-side exception.
# NOTE(review): the embedded numbering has gaps (3419-3420, 3422, 3425,
# 3428, 3431, 3439, 3446, ...), so several `def` lines (_stash_root,
# _stash_bad), uri-mangling lines, and self.GET arguments are not
# visible here.
3415 def test_exceptions(self):
3416 self.basedir = "web/Grid/exceptions"
3417 self.set_up_grid(num_clients=1, num_servers=2)
3418 c0 = self.g.clients[0]
3421 d = c0.create_dirnode()
3423 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3424 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3426 d.addCallback(_stash_root)
3427 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# "1share": delete all but one share. "0shares": flip a bit in the
# readkey so the resulting URI matches no shares at all.
3429 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3430 self.delete_shares_numbered(ur.uri, range(1,10))
3432 u = uri.from_string(ur.uri)
3433 u.key = testutil.flip_bit(u.key, 0)
3434 baduri = u.to_string()
3435 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3436 d.addCallback(_stash_bad)
3437 d.addCallback(lambda ign: c0.create_dirnode())
3438 def _mangle_dirnode_1share(n):
3440 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3441 self.fileurls["dir-1share-json"] = url + "?t=json"
3442 self.delete_shares_numbered(u, range(1,10))
3443 d.addCallback(_mangle_dirnode_1share)
3444 d.addCallback(lambda ign: c0.create_dirnode())
3445 def _mangle_dirnode_0share(n):
3447 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3448 self.fileurls["dir-0share-json"] = url + "?t=json"
3449 self.delete_shares_numbered(u, range(0,10))
3450 d.addCallback(_mangle_dirnode_0share)
3452 # NotEnoughSharesError should be reported sensibly, with a
3453 # text/plain explanation of the problem, and perhaps some
3454 # information on which shares *could* be found.
3456 d.addCallback(lambda ignored:
3457 self.shouldHTTPError("GET unrecoverable",
3458 410, "Gone", "NoSharesError",
3459 self.GET, self.fileurls["0shares"]))
3460 def _check_zero_shares(body):
3461 self.failIf("<html>" in body, body)
# Collapse whitespace so the assertion is insensitive to wrapping.
3462 body = " ".join(body.strip().split())
3463 exp = ("NoSharesError: no shares could be found. "
3464 "Zero shares usually indicates a corrupt URI, or that "
3465 "no servers were connected, but it might also indicate "
3466 "severe corruption. You should perform a filecheck on "
3467 "this object to learn more. The full error message is: "
3468 "Failed to get enough shareholders: have 0, need 3")
3469 self.failUnlessEqual(exp, body)
3470 d.addCallback(_check_zero_shares)
3473 d.addCallback(lambda ignored:
3474 self.shouldHTTPError("GET 1share",
3475 410, "Gone", "NotEnoughSharesError",
3476 self.GET, self.fileurls["1share"]))
3477 def _check_one_share(body):
3478 self.failIf("<html>" in body, body)
3479 body = " ".join(body.strip().split())
3480 exp = ("NotEnoughSharesError: This indicates that some "
3481 "servers were unavailable, or that shares have been "
3482 "lost to server departure, hard drive failure, or disk "
3483 "corruption. You should perform a filecheck on "
3484 "this object to learn more. The full error message is:"
3485 " Failed to get enough shareholders: have 1, need 3")
3486 self.failUnlessEqual(exp, body)
3487 d.addCallback(_check_one_share)
3489 d.addCallback(lambda ignored:
3490 self.shouldHTTPError("GET imaginary",
3491 404, "Not Found", None,
3492 self.GET, self.fileurls["imaginary"]))
3493 def _missing_child(body):
3494 self.failUnless("No such child: imaginary" in body, body)
3495 d.addCallback(_missing_child)
# Unrecoverable *directories* still render an HTML page, minus the
# child table and upload forms, with the explanation inline.
3497 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3498 def _check_0shares_dir_html(body):
3499 self.failUnless("<html>" in body, body)
3500 # we should see the regular page, but without the child table or
3502 body = " ".join(body.strip().split())
3503 self.failUnlessIn('href="?t=info">More info on this directory',
3505 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3506 "could not be retrieved, because there were insufficient "
3507 "good shares. This might indicate that no servers were "
3508 "connected, insufficient servers were connected, the URI "
3509 "was corrupt, or that shares have been lost due to server "
3510 "departure, hard drive failure, or disk corruption. You "
3511 "should perform a filecheck on this object to learn more.")
3512 self.failUnlessIn(exp, body)
3513 self.failUnlessIn("No upload forms: directory is unreadable", body)
3514 d.addCallback(_check_0shares_dir_html)
3516 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3517 def _check_1shares_dir_html(body):
3518 # at some point, we'll split UnrecoverableFileError into 0-shares
3519 # and some-shares like we did for immutable files (since there
3520 # are different sorts of advice to offer in each case). For now,
3521 # they present the same way.
3522 self.failUnless("<html>" in body, body)
3523 body = " ".join(body.strip().split())
3524 self.failUnlessIn('href="?t=info">More info on this directory',
3526 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3527 "could not be retrieved, because there were insufficient "
3528 "good shares. This might indicate that no servers were "
3529 "connected, insufficient servers were connected, the URI "
3530 "was corrupt, or that shares have been lost due to server "
3531 "departure, hard drive failure, or disk corruption. You "
3532 "should perform a filecheck on this object to learn more.")
3533 self.failUnlessIn(exp, body)
3534 self.failUnlessIn("No upload forms: directory is unreadable", body)
3535 d.addCallback(_check_1shares_dir_html)
# The ?t=json views of those directories should 410 with plain text.
3537 d.addCallback(lambda ignored:
3538 self.shouldHTTPError("GET dir-0share-json",
3539 410, "Gone", "UnrecoverableFileError",
3541 self.fileurls["dir-0share-json"]))
3542 def _check_unrecoverable_file(body):
3543 self.failIf("<html>" in body, body)
3544 body = " ".join(body.strip().split())
3545 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3546 "could not be retrieved, because there were insufficient "
3547 "good shares. This might indicate that no servers were "
3548 "connected, insufficient servers were connected, the URI "
3549 "was corrupt, or that shares have been lost due to server "
3550 "departure, hard drive failure, or disk corruption. You "
3551 "should perform a filecheck on this object to learn more.")
3552 self.failUnlessEqual(exp, body)
3553 d.addCallback(_check_unrecoverable_file)
3555 d.addCallback(lambda ignored:
3556 self.shouldHTTPError("GET dir-1share-json",
3557 410, "Gone", "UnrecoverableFileError",
3559 self.fileurls["dir-1share-json"]))
3560 d.addCallback(_check_unrecoverable_file)
3562 d.addCallback(lambda ignored:
3563 self.shouldHTTPError("GET imaginary",
3564 404, "Not Found", None,
3565 self.GET, self.fileurls["imaginary"]))
3567 # attach a webapi child that throws a random error, to test how it
3569 w = c0.getServiceNamed("webish")
3570 w.root.putChild("ERRORBOOM", ErrorBoom())
3572 d.addCallback(lambda ignored:
3573 self.shouldHTTPError("GET errorboom_html",
3574 500, "Internal Server Error", None,
3575 self.GET, "ERRORBOOM"))
3576 def _internal_error_html(body):
3577 # test that a weird exception during a webapi operation with
3578 # Accept:*/* results in a text/html stack trace, while one
3579 # without that Accept: line gets us a text/plain stack trace
3580 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3581 d.addCallback(_internal_error_html)
3583 d.addCallback(lambda ignored:
3584 self.shouldHTTPError("GET errorboom_text",
3585 500, "Internal Server Error", None,
3586 self.GET, "ERRORBOOM",
3587 headers={"accept": ["text/plain"]}))
3588 def _internal_error_text(body):
3589 # test that a weird exception during a webapi operation with
3590 # Accept:*/* results in a text/html stack trace, while one
3591 # without that Accept: line gets us a text/plain stack trace
3592 self.failIf("<html>" in body, body)
3593 self.failUnless(body.startswith("Traceback "), body)
3594 d.addCallback(_internal_error_text)
3596 def _flush_errors(res):
3597 # Trial: please ignore the CompletelyUnhandledError in the logs
3598 self.flushLoggedErrors(CompletelyUnhandledError)
3600 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Exception deliberately left unhandled by any webapi code.

    Raised by ErrorBoom so test_exceptions can observe how the web
    frontend renders a 500 Internal Server Error; the test flushes it
    from Trial's logs afterward.
    """
    # Defect fixed: in this listing the class statement had no body,
    # which is a syntax error; a docstring body makes it well-formed
    # without changing behavior.
class ErrorBoom(rend.Page):
    """A web resource whose rendering always blows up.

    test_exceptions mounts this at /ERRORBOOM so it can assert on how
    the webapi reports an unhandled internal error (HTML vs text/plain
    traceback, depending on the Accept header).
    """
    def beforeRender(self, ctx):
        # Raised before any rendering happens; nothing in the webapi
        # catches this exception type.
        raise CompletelyUnhandledError("whoops")