1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.internet.task import Clock
8 from twisted.web import client, error, http
9 from twisted.python import failure, log
10 from nevow import rend
11 from allmydata import interfaces, uri, webish, dirnode
12 from allmydata.storage.shares import get_share_file
13 from allmydata.storage_client import StorageFarmBroker
14 from allmydata.immutable import upload, download
15 from allmydata.dirnode import DirectoryNode
16 from allmydata.nodemaker import NodeMaker
17 from allmydata.unknown import UnknownNode
18 from allmydata.web import status, common
19 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
20 from allmydata.util import fileutil, base32
21 from allmydata.util.consumer import download_to_data
22 from allmydata.util.netstring import split_netstring
23 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
24 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
25 from allmydata.interfaces import IMutableFileNode
26 from allmydata.mutable import servermap, publish, retrieve
27 import common_util as testutil
28 from allmydata.test.no_network import GridTestMixin
29 from allmydata.test.common_web import HTTPClientGETFactory, \
31 from allmydata.client import Client, SecretHolder
# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

# trial reads this per-module attribute as the per-test timeout (seconds);
# raised above the default because these tests are slow on some hardware.
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
class FakeStatsProvider:
    """Stats-provider double that reports empty stats and counters."""
    # NOTE(review): this chunk appears to be missing lines here (likely a
    # "def get_stats(self):" header and a "return stats" line) -- the dict
    # below is presumably built inside that method; confirm against the
    # full source.
    stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker double that hands back purely in-memory fake filenodes.

    Overrides the node-construction hooks so no real storage or crypto is
    involved: literal and immutable caps both become FakeCHKFileNode
    instances, mutable caps become FakeMutableFileNode instances.
    """
    def _create_lit(self, cap):
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        fake = FakeMutableFileNode(None, None, None, None)
        return fake.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # keysize is accepted for interface compatibility but ignored by
        # the fake node.
        return FakeMutableFileNode(None, None, None, None).create(contents)
class FakeUploader(service.Service):
    """Uploader double: 'uploads' by creating an in-memory fake filenode."""
    # NOTE(review): this chunk appears to be missing several lines in this
    # class (the _got_data callback header, its return, and the method
    # return values); comments below are hedged accordingly.
    def upload(self, uploadable, history=None):
        # read the entire uploadable into memory, then wrap it in a fake
        # CHK filenode and report its URI via UploadResults
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # NOTE(review): a "def _got_data(datav):" header (joining the read
        # data into a single string) is presumably elided here -- confirm
        # against the full source.
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
        # NOTE(review): a "return d" line is presumably elided here.
    def get_helper_info(self):
        # NOTE(review): method body is not visible in this chunk.
# NOTE(review): the enclosing class header (presumably "class FakeHistory:")
# is not visible in this chunk; the following are its class-level
# attributes and accessor methods.
# Each _all_* list holds exactly one pre-made status object, so the
# status-page tests have one entry of each kind to render.
_all_upload_status = [upload.UploadStatus()]
_all_download_status = [download.DownloadStatus()]
_all_mapupdate_statuses = [servermap.UpdateStatus()]
_all_publish_statuses = [publish.PublishStatus()]
_all_retrieve_statuses = [retrieve.RetrieveStatus()]

def list_all_upload_statuses(self):
    return self._all_upload_status
def list_all_download_statuses(self):
    return self._all_download_status
def list_all_mapupdate_statuses(self):
    return self._all_mapupdate_statuses
def list_all_publish_statuses(self):
    return self._all_publish_statuses
def list_all_retrieve_statuses(self):
    return self._all_retrieve_statuses
def list_all_helper_statuses(self):
    # NOTE(review): method body (presumably "return []") is not visible
    # in this chunk.
class FakeClient(Client):
    """Client double assembled from the Fake* helpers above."""
    # NOTE(review): the "def __init__(self):" header and a few other lines
    # appear to be elided from this chunk; comments are hedged accordingly.
    # don't upcall to Client.__init__, since we only want to initialize a
    service.MultiService.__init__(self)
    self.nodeid = "fake_nodeid"
    self.nickname = "fake_nickname"
    self.introducer_furl = "None"
    self.stats_provider = FakeStatsProvider()
    self._secret_holder = SecretHolder("lease secret", "convergence secret")
    self.convergence = "some random string"
    # a real broker, but with no servers attached
    self.storage_broker = StorageFarmBroker(None, permute_peers=True)
    self.introducer_client = None
    self.history = FakeHistory()
    self.uploader = FakeUploader()
    self.uploader.setServiceParent(self)
    self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
                                   self.uploader, None, None,
    # NOTE(review): the remaining FakeNodeMaker constructor arguments are
    # not visible in this chunk.

    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
# module-level alias for the SDMF one-segment size limit enforced by the
# fake mutable filenode
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
118 class WebMixin(object):
120 self.s = FakeClient()
121 self.s.startService()
122 self.staticdir = self.mktemp()
124 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
126 self.ws.setServiceParent(self.s)
127 self.webish_port = port = self.ws.listener._port.getHost().port
128 self.webish_url = "http://localhost:%d" % port
130 l = [ self.s.create_dirnode() for x in range(6) ]
131 d = defer.DeferredList(l)
133 self.public_root = res[0][1]
134 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
135 self.public_url = "/uri/" + self.public_root.get_uri()
136 self.private_root = res[1][1]
140 self._foo_uri = foo.get_uri()
141 self._foo_readonly_uri = foo.get_readonly_uri()
142 self._foo_verifycap = foo.get_verify_cap().to_string()
143 # NOTE: we ignore the deferred on all set_uri() calls, because we
144 # know the fake nodes do these synchronously
145 self.public_root.set_uri(u"foo", foo.get_uri(),
146 foo.get_readonly_uri())
148 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
149 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
150 self._bar_txt_verifycap = n.get_verify_cap().to_string()
152 foo.set_uri(u"empty", res[3][1].get_uri(),
153 res[3][1].get_readonly_uri())
154 sub_uri = res[4][1].get_uri()
155 self._sub_uri = sub_uri
156 foo.set_uri(u"sub", sub_uri, sub_uri)
157 sub = self.s.create_node_from_uri(sub_uri)
159 _ign, n, blocking_uri = self.makefile(1)
160 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
162 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
163 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
164 # still think of it as an umlaut
165 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
167 _ign, n, baz_file = self.makefile(2)
168 self._baz_file_uri = baz_file
169 sub.set_uri(u"baz.txt", baz_file, baz_file)
171 _ign, n, self._bad_file_uri = self.makefile(3)
172 # this uri should not be downloadable
173 del FakeCHKFileNode.all_contents[self._bad_file_uri]
176 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
177 rodir.get_readonly_uri())
178 rodir.set_uri(u"nor", baz_file, baz_file)
183 # public/foo/blockingfile
186 # public/foo/sub/baz.txt
188 # public/reedownlee/nor
189 self.NEWFILE_CONTENTS = "newfile contents\n"
191 return foo.get_metadata_for(u"bar.txt")
193 def _got_metadata(metadata):
194 self._bar_txt_metadata = metadata
195 d.addCallback(_got_metadata)
def makefile(self, number):
    """Create an in-memory immutable file whose contents mention *number*.

    Returns a (contents, filenode, uri) tuple for use as a test fixture.
    """
    body = "contents of file %s\n" % number
    node = create_chk_filenode(body)
    return (body, node, node.get_uri())
204 return self.s.stopService()
def failUnlessIsBarDotTxt(self, res):
    """Assert that *res* is exactly the fixture contents of bar.txt."""
    expected = self.BAR_CONTENTS
    self.failUnlessEqual(res, expected, res)
def failUnlessIsBarJSON(self, res):
    """Assert that *res* is the t=json description of the immutable bar.txt."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], u"filenode")
    nodeinfo = parsed[1]
    self.failUnless(isinstance(nodeinfo, dict))
    self.failIf(nodeinfo["mutable"])
    # an immutable file must not expose a write-cap
    self.failIf("rw_uri" in nodeinfo)
    self.failUnlessEqual(nodeinfo["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(nodeinfo["verify_uri"], self._bar_txt_verifycap)
    self.failUnlessEqual(nodeinfo["size"], len(self.BAR_CONTENTS))
220 def failUnlessIsFooJSON(self, res):
221 data = simplejson.loads(res)
222 self.failUnless(isinstance(data, list))
223 self.failUnlessEqual(data[0], "dirnode", res)
224 self.failUnless(isinstance(data[1], dict))
225 self.failUnless(data[1]["mutable"])
226 self.failUnless("rw_uri" in data[1]) # mutable
227 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
228 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
229 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
231 kidnames = sorted([unicode(n) for n in data[1]["children"]])
232 self.failUnlessEqual(kidnames,
233 [u"bar.txt", u"blockingfile", u"empty",
234 u"n\u00fc.txt", u"sub"])
235 kids = dict( [(unicode(name),value)
237 in data[1]["children"].iteritems()] )
238 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
239 self.failUnless("metadata" in kids[u"sub"][1])
240 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
241 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
242 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
243 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
244 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
245 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
246 self._bar_txt_verifycap)
247 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
248 self._bar_txt_metadata["ctime"])
249 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
252 def GET(self, urlpath, followRedirect=False, return_response=False,
254 # if return_response=True, this fires with (data, statuscode,
255 # respheaders) instead of just data.
256 assert not isinstance(urlpath, unicode)
257 url = self.webish_url + urlpath
258 factory = HTTPClientGETFactory(url, method="GET",
259 followRedirect=followRedirect, **kwargs)
260 reactor.connectTCP("localhost", self.webish_port, factory)
263 return (data, factory.status, factory.response_headers)
265 d.addCallback(_got_data)
266 return factory.deferred
268 def HEAD(self, urlpath, return_response=False, **kwargs):
269 # this requires some surgery, because twisted.web.client doesn't want
270 # to give us back the response headers.
271 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
272 reactor.connectTCP("localhost", self.webish_port, factory)
275 return (data, factory.status, factory.response_headers)
277 d.addCallback(_got_data)
278 return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """Issue an HTTP PUT of *data* to *urlpath* on the test webserver.

    Returns the Deferred from twisted.web.client.getPage, firing with
    the response body.
    """
    target = self.webish_url + urlpath
    return client.getPage(target, method="PUT", postdata=data, **kwargs)
def DELETE(self, urlpath):
    """Issue an HTTP DELETE to *urlpath* on the test webserver.

    Returns the Deferred from twisted.web.client.getPage, firing with
    the response body.
    """
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
288 def POST(self, urlpath, followRedirect=False, **fields):
289 sepbase = "boogabooga"
293 form.append('Content-Disposition: form-data; name="_charset"')
297 for name, value in fields.iteritems():
298 if isinstance(value, tuple):
299 filename, value = value
300 form.append('Content-Disposition: form-data; name="%s"; '
301 'filename="%s"' % (name, filename.encode("utf-8")))
303 form.append('Content-Disposition: form-data; name="%s"' % name)
305 if isinstance(value, unicode):
306 value = value.encode("utf-8")
309 assert isinstance(value, str)
316 body = "\r\n".join(form) + "\r\n"
317 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
318 return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """POST a pre-built request *body* (with *headers*) to *urlpath*.

    Returns the Deferred from twisted.web.client.getPage, firing with
    the response body.
    """
    # Use a None sentinel instead of a mutable {} default: a literal dict
    # default is shared across all calls, so any mutation (e.g. by a
    # caller or by getPage) would leak between tests. Behavior for
    # existing callers is unchanged.
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
325 def shouldFail(self, res, expected_failure, which,
326 substring=None, response_substring=None):
327 if isinstance(res, failure.Failure):
328 res.trap(expected_failure)
330 self.failUnless(substring in str(res),
331 "substring '%s' not in '%s'"
332 % (substring, str(res)))
333 if response_substring:
334 self.failUnless(response_substring in res.value.response,
335 "response substring '%s' not in '%s'"
336 % (response_substring, res.value.response))
338 self.fail("%s was supposed to raise %s, not get '%s'" %
339 (which, expected_failure, res))
341 def shouldFail2(self, expected_failure, which, substring,
343 callable, *args, **kwargs):
344 assert substring is None or isinstance(substring, str)
345 assert response_substring is None or isinstance(response_substring, str)
346 d = defer.maybeDeferred(callable, *args, **kwargs)
348 if isinstance(res, failure.Failure):
349 res.trap(expected_failure)
351 self.failUnless(substring in str(res),
352 "%s: substring '%s' not in '%s'"
353 % (which, substring, str(res)))
354 if response_substring:
355 self.failUnless(response_substring in res.value.response,
356 "%s: response substring '%s' not in '%s'"
358 response_substring, res.value.response))
360 self.fail("%s was supposed to raise %s, not get '%s'" %
361 (which, expected_failure, res))
365 def should404(self, res, which):
366 if isinstance(res, failure.Failure):
367 res.trap(error.Error)
368 self.failUnlessEqual(res.value.status, "404")
370 self.fail("%s was supposed to Error(404), not get '%s'" %
373 def should302(self, res, which):
374 if isinstance(res, failure.Failure):
375 res.trap(error.Error)
376 self.failUnlessEqual(res.value.status, "302")
378 self.fail("%s was supposed to Error(302), not get '%s'" %
382 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
383 def test_create(self):
386 def test_welcome(self):
389 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
391 self.s.basedir = 'web/test_welcome'
392 fileutil.make_dirs("web/test_welcome")
393 fileutil.make_dirs("web/test_welcome/private")
395 d.addCallback(_check)
398 def test_provisioning(self):
399 d = self.GET("/provisioning/")
401 self.failUnless('Tahoe Provisioning Tool' in res)
402 fields = {'filled': True,
403 "num_users": int(50e3),
404 "files_per_user": 1000,
405 "space_per_user": int(1e9),
406 "sharing_ratio": 1.0,
407 "encoding_parameters": "3-of-10-5",
409 "ownership_mode": "A",
410 "download_rate": 100,
415 return self.POST("/provisioning/", **fields)
417 d.addCallback(_check)
419 self.failUnless('Tahoe Provisioning Tool' in res)
420 self.failUnless("Share space consumed: 167.01TB" in res)
422 fields = {'filled': True,
423 "num_users": int(50e6),
424 "files_per_user": 1000,
425 "space_per_user": int(5e9),
426 "sharing_ratio": 1.0,
427 "encoding_parameters": "25-of-100-50",
428 "num_servers": 30000,
429 "ownership_mode": "E",
430 "drive_failure_model": "U",
432 "download_rate": 1000,
437 return self.POST("/provisioning/", **fields)
438 d.addCallback(_check2)
440 self.failUnless("Share space consumed: huge!" in res)
441 fields = {'filled': True}
442 return self.POST("/provisioning/", **fields)
443 d.addCallback(_check3)
445 self.failUnless("Share space consumed:" in res)
446 d.addCallback(_check4)
449 def test_reliability_tool(self):
451 from allmydata import reliability
452 _hush_pyflakes = reliability
455 raise unittest.SkipTest("reliability tool requires NumPy")
457 d = self.GET("/reliability/")
459 self.failUnless('Tahoe Reliability Tool' in res)
460 fields = {'drive_lifetime': "8Y",
465 "check_period": "1M",
466 "report_period": "3M",
469 return self.POST("/reliability/", **fields)
471 d.addCallback(_check)
473 self.failUnless('Tahoe Reliability Tool' in res)
474 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
475 self.failUnless(re.search(r, res), res)
476 d.addCallback(_check2)
479 def test_status(self):
480 h = self.s.get_history()
481 dl_num = h.list_all_download_statuses()[0].get_counter()
482 ul_num = h.list_all_upload_statuses()[0].get_counter()
483 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
484 pub_num = h.list_all_publish_statuses()[0].get_counter()
485 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
486 d = self.GET("/status", followRedirect=True)
488 self.failUnless('Upload and Download Status' in res, res)
489 self.failUnless('"down-%d"' % dl_num in res, res)
490 self.failUnless('"up-%d"' % ul_num in res, res)
491 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
492 self.failUnless('"publish-%d"' % pub_num in res, res)
493 self.failUnless('"retrieve-%d"' % ret_num in res, res)
494 d.addCallback(_check)
495 d.addCallback(lambda res: self.GET("/status/?t=json"))
496 def _check_json(res):
497 data = simplejson.loads(res)
498 self.failUnless(isinstance(data, dict))
499 #active = data["active"]
500 # TODO: test more. We need a way to fake an active operation
502 d.addCallback(_check_json)
504 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
506 self.failUnless("File Download Status" in res, res)
507 d.addCallback(_check_dl)
508 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
510 self.failUnless("File Upload Status" in res, res)
511 d.addCallback(_check_ul)
512 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
513 def _check_mapupdate(res):
514 self.failUnless("Mutable File Servermap Update Status" in res, res)
515 d.addCallback(_check_mapupdate)
516 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
517 def _check_publish(res):
518 self.failUnless("Mutable File Publish Status" in res, res)
519 d.addCallback(_check_publish)
520 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
521 def _check_retrieve(res):
522 self.failUnless("Mutable File Retrieve Status" in res, res)
523 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """The render_time/render_rate helpers format durations and speeds
    the same way for both download and upload results."""
    renderers = (status.DownloadResultsRendererMixin(),
                 status.UploadResultsRendererMixin())
    for rrm in renderers:
        # durations: None renders as empty, otherwise scaled s/ms/us
        self.failUnlessEqual(rrm.render_time(None, None), "")
        self.failUnlessEqual(rrm.render_time(None, 2.5), "2.50s")
        self.failUnlessEqual(rrm.render_time(None, 0.25), "250ms")
        self.failUnlessEqual(rrm.render_time(None, 0.0021), "2.1ms")
        self.failUnlessEqual(rrm.render_time(None, 0.000123), "123us")
        # rates: None renders as empty, otherwise scaled MBps/kBps/Bps
        self.failUnlessEqual(rrm.render_rate(None, None), "")
        self.failUnlessEqual(rrm.render_rate(None, 2500000), "2.50MBps")
        self.failUnlessEqual(rrm.render_rate(None, 30100), "30.1kBps")
        self.failUnlessEqual(rrm.render_rate(None, 123), "123Bps")
550 def test_GET_FILEURL(self):
551 d = self.GET(self.public_url + "/foo/bar.txt")
552 d.addCallback(self.failUnlessIsBarDotTxt)
555 def test_GET_FILEURL_range(self):
556 headers = {"range": "bytes=1-10"}
557 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
558 return_response=True)
559 def _got((res, status, headers)):
560 self.failUnlessEqual(int(status), 206)
561 self.failUnless(headers.has_key("content-range"))
562 self.failUnlessEqual(headers["content-range"][0],
563 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
564 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
568 def test_GET_FILEURL_partial_range(self):
569 headers = {"range": "bytes=5-"}
570 length = len(self.BAR_CONTENTS)
571 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
572 return_response=True)
573 def _got((res, status, headers)):
574 self.failUnlessEqual(int(status), 206)
575 self.failUnless(headers.has_key("content-range"))
576 self.failUnlessEqual(headers["content-range"][0],
577 "bytes 5-%d/%d" % (length-1, length))
578 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
582 def test_HEAD_FILEURL_range(self):
583 headers = {"range": "bytes=1-10"}
584 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
585 return_response=True)
586 def _got((res, status, headers)):
587 self.failUnlessEqual(res, "")
588 self.failUnlessEqual(int(status), 206)
589 self.failUnless(headers.has_key("content-range"))
590 self.failUnlessEqual(headers["content-range"][0],
591 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
595 def test_HEAD_FILEURL_partial_range(self):
596 headers = {"range": "bytes=5-"}
597 length = len(self.BAR_CONTENTS)
598 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
599 return_response=True)
600 def _got((res, status, headers)):
601 self.failUnlessEqual(int(status), 206)
602 self.failUnless(headers.has_key("content-range"))
603 self.failUnlessEqual(headers["content-range"][0],
604 "bytes 5-%d/%d" % (length-1, length))
608 def test_GET_FILEURL_range_bad(self):
609 headers = {"range": "BOGUS=fizbop-quarnak"}
610 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
612 "Syntactically invalid http range header",
613 self.GET, self.public_url + "/foo/bar.txt",
617 def test_HEAD_FILEURL(self):
618 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
619 def _got((res, status, headers)):
620 self.failUnlessEqual(res, "")
621 self.failUnlessEqual(headers["content-length"][0],
622 str(len(self.BAR_CONTENTS)))
623 self.failUnlessEqual(headers["content-type"], ["text/plain"])
627 def test_GET_FILEURL_named(self):
628 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
629 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
630 d = self.GET(base + "/@@name=/blah.txt")
631 d.addCallback(self.failUnlessIsBarDotTxt)
632 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
633 d.addCallback(self.failUnlessIsBarDotTxt)
634 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
635 d.addCallback(self.failUnlessIsBarDotTxt)
636 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
637 d.addCallback(self.failUnlessIsBarDotTxt)
638 save_url = base + "?save=true&filename=blah.txt"
639 d.addCallback(lambda res: self.GET(save_url))
640 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
641 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
642 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
643 u_url = base + "?save=true&filename=" + u_fn_e
644 d.addCallback(lambda res: self.GET(u_url))
645 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
648 def test_PUT_FILEURL_named_bad(self):
649 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
650 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
652 "/file can only be used with GET or HEAD",
653 self.PUT, base + "/@@name=/blah.txt", "")
656 def test_GET_DIRURL_named_bad(self):
657 base = "/file/%s" % urllib.quote(self._foo_uri)
658 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
661 self.GET, base + "/@@name=/blah.txt")
664 def test_GET_slash_file_bad(self):
665 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
667 "/file must be followed by a file-cap and a name",
671 def test_GET_unhandled_URI_named(self):
672 contents, n, newuri = self.makefile(12)
673 verifier_cap = n.get_verify_cap().to_string()
674 base = "/file/%s" % urllib.quote(verifier_cap)
675 # client.create_node_from_uri() can't handle verify-caps
676 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
677 "400 Bad Request", "is not a file-cap",
681 def test_GET_unhandled_URI(self):
682 contents, n, newuri = self.makefile(12)
683 verifier_cap = n.get_verify_cap().to_string()
684 base = "/uri/%s" % urllib.quote(verifier_cap)
685 # client.create_node_from_uri() can't handle verify-caps
686 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
688 "GET unknown URI type: can only do t=info",
692 def test_GET_FILE_URI(self):
693 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
695 d.addCallback(self.failUnlessIsBarDotTxt)
698 def test_GET_FILE_URI_badchild(self):
699 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
700 errmsg = "Files have no children, certainly not named 'boguschild'"
701 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
702 "400 Bad Request", errmsg,
706 def test_PUT_FILE_URI_badchild(self):
707 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
708 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
709 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
710 "400 Bad Request", errmsg,
714 # TODO: version of this with a Unicode filename
715 def test_GET_FILEURL_save(self):
716 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
717 return_response=True)
718 def _got((res, statuscode, headers)):
719 content_disposition = headers["content-disposition"][0]
720 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
721 self.failUnlessIsBarDotTxt(res)
725 def test_GET_FILEURL_missing(self):
726 d = self.GET(self.public_url + "/foo/missing")
727 d.addBoth(self.should404, "test_GET_FILEURL_missing")
730 def test_PUT_overwrite_only_files(self):
731 # create a directory, put a file in that directory.
732 contents, n, filecap = self.makefile(8)
733 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
734 d.addCallback(lambda res:
735 self.PUT(self.public_url + "/foo/dir/file1.txt",
736 self.NEWFILE_CONTENTS))
737 # try to overwrite the file with replace=only-files
739 d.addCallback(lambda res:
740 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
742 d.addCallback(lambda res:
743 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
744 "There was already a child by that name, and you asked me "
746 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
750 def test_PUT_NEWFILEURL(self):
751 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
752 # TODO: we lose the response code, so we can't check this
753 #self.failUnlessEqual(responsecode, 201)
754 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
755 d.addCallback(lambda res:
756 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
757 self.NEWFILE_CONTENTS))
760 def test_PUT_NEWFILEURL_not_mutable(self):
761 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
762 self.NEWFILE_CONTENTS)
763 # TODO: we lose the response code, so we can't check this
764 #self.failUnlessEqual(responsecode, 201)
765 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
766 d.addCallback(lambda res:
767 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
768 self.NEWFILE_CONTENTS))
771 def test_PUT_NEWFILEURL_range_bad(self):
772 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
773 target = self.public_url + "/foo/new.txt"
774 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
775 "501 Not Implemented",
776 "Content-Range in PUT not yet supported",
777 # (and certainly not for immutable files)
778 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
780 d.addCallback(lambda res:
781 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
784 def test_PUT_NEWFILEURL_mutable(self):
785 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
786 self.NEWFILE_CONTENTS)
787 # TODO: we lose the response code, so we can't check this
788 #self.failUnlessEqual(responsecode, 201)
790 u = uri.from_string_mutable_filenode(res)
791 self.failUnless(u.is_mutable())
792 self.failIf(u.is_readonly())
794 d.addCallback(_check_uri)
795 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
796 d.addCallback(lambda res:
797 self.failUnlessMutableChildContentsAre(self._foo_node,
799 self.NEWFILE_CONTENTS))
802 def test_PUT_NEWFILEURL_mutable_toobig(self):
803 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
804 "413 Request Entity Too Large",
805 "SDMF is limited to one segment, and 10001 > 10000",
807 self.public_url + "/foo/new.txt?mutable=true",
808 "b" * (self.s.MUTABLE_SIZELIMIT+1))
811 def test_PUT_NEWFILEURL_replace(self):
812 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
813 # TODO: we lose the response code, so we can't check this
814 #self.failUnlessEqual(responsecode, 200)
815 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
816 d.addCallback(lambda res:
817 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
818 self.NEWFILE_CONTENTS))
821 def test_PUT_NEWFILEURL_bad_t(self):
822 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
823 "PUT to a file: bad t=bogus",
824 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
828 def test_PUT_NEWFILEURL_no_replace(self):
829 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
830 self.NEWFILE_CONTENTS)
831 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
833 "There was already a child by that name, and you asked me "
837 def test_PUT_NEWFILEURL_mkdirs(self):
838 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
840 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
841 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
842 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
843 d.addCallback(lambda res:
844 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
845 self.NEWFILE_CONTENTS))
848 def test_PUT_NEWFILEURL_blocked(self):
849 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
850 self.NEWFILE_CONTENTS)
851 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
853 "Unable to create directory 'blockingfile': a file was in the way")
856 def test_PUT_NEWFILEURL_emptyname(self):
857 # an empty pathname component (i.e. a double-slash) is disallowed
858 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
860 "The webapi does not allow empty pathname components",
861 self.PUT, self.public_url + "/foo//new.txt", "")
864 def test_DELETE_FILEURL(self):
865 d = self.DELETE(self.public_url + "/foo/bar.txt")
866 d.addCallback(lambda res:
867 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
870 def test_DELETE_FILEURL_missing(self):
871 d = self.DELETE(self.public_url + "/foo/missing")
872 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
875 def test_DELETE_FILEURL_missing2(self):
876 d = self.DELETE(self.public_url + "/missing/missing")
877 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
def failUnlessHasBarDotTxtMetadata(self, res):
    """Assert that the JSON in *res* carries bar.txt's metadata, and that
    its ctime matches the metadata recorded at setup time."""
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    # use the 'in' operator rather than the deprecated dict.has_key(),
    # consistent with failUnlessIsBarJSON elsewhere in this file
    self.failUnless("metadata" in data[1])
    self.failUnless("ctime" in data[1]["metadata"])
    self.failUnless("mtime" in data[1]["metadata"])
    self.failUnlessEqual(data[1]["metadata"]["ctime"],
                         self._bar_txt_metadata["ctime"])
889 def test_GET_FILEURL_json(self):
890 # twisted.web.http.parse_qs ignores any query args without an '=', so
891 # I can't do "GET /path?json", I have to do "GET /path/t=json"
892 # instead. This may make it tricky to emulate the S3 interface
894 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
896 self.failUnlessIsBarJSON(data)
897 self.failUnlessHasBarDotTxtMetadata(data)
899 d.addCallback(_check1)
902 def test_GET_FILEURL_json_missing(self):
903 d = self.GET(self.public_url + "/foo/missing?json")
904 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
907 def test_GET_FILEURL_uri(self):
908 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
910 self.failUnlessEqual(res, self._bar_txt_uri)
911 d.addCallback(_check)
912 d.addCallback(lambda res:
913 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
915 # for now, for files, uris and readonly-uris are the same
916 self.failUnlessEqual(res, self._bar_txt_uri)
917 d.addCallback(_check2)
920 def test_GET_FILEURL_badtype(self):
921 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
924 self.public_url + "/foo/bar.txt?t=bogus")
927 def test_GET_FILEURL_uri_missing(self):
928 d = self.GET(self.public_url + "/foo/missing?t=uri")
929 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
def test_GET_DIRURL(self):
    # Fetch the directory listing page and check its rendered HTML.
    # the addSlash means we get a redirect here
    # from /uri/$URI/foo/ , we need ../../../ to get back to the root
    # NOTE(review): the ROOT assignment and 'def _check(res):' lines
    # appear to be missing from this excerpt -- confirm against VCS
    d = self.GET(self.public_url + "/foo", followRedirect=True)
    self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
    # the FILE reference points to a URI, but it should end in bar.txt
    bar_url = ("%s/file/%s/@@named=/bar.txt" %
               (ROOT, urllib.quote(self._bar_txt_uri)))
    get_bar = "".join([r'<td>FILE</td>',
                       r'<a href="%s">bar.txt</a>' % bar_url,
                       r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
    # NOTE(review): the list appears truncated; closing bracket missing
    self.failUnless(re.search(get_bar, res), res)
    for line in res.split("\n"):
        # find the line that contains the delete button for bar.txt
        if ("form action" in line and
            'value="delete"' in line and
            'value="bar.txt"' in line):
            # the form target should use a relative URL
            foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
            self.failUnless(('action="%s"' % foo_url) in line, line)
            # and the when_done= should too
            #done_url = urllib.quote(???)
            #self.failUnless(('name="when_done" value="%s"' % done_url)
            # NOTE(review): the 'break' that ends this search loop appears
            # to be missing; without it the self.fail() below always runs
    self.fail("unable to find delete-bar.txt line", res)
    # the DIR reference just points to a URI
    sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
    get_sub = ((r'<td>DIR</td>')
               +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
    self.failUnless(re.search(get_sub, res), res)
    d.addCallback(_check)
    # look at a readonly directory
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/reedownlee", followRedirect=True))
    # NOTE(review): 'def _check2(res):' line missing from excerpt
    self.failUnless("(read-only)" in res, res)
    self.failIf("Upload a file" in res, res)
    d.addCallback(_check2)
    # and at a directory that contains a readonly directory
    d.addCallback(lambda res:
                  self.GET(self.public_url, followRedirect=True))
    # NOTE(review): 'def _check3(res):' line missing from excerpt
    self.failUnless(re.search('<td>DIR-RO</td>'
                              r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
    d.addCallback(_check3)
    # and an empty directory
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
    # NOTE(review): 'def _check4(res):' line missing from excerpt
    self.failUnless("directory is empty" in res, res)
    MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
    self.failUnless(MKDIR_BUTTON_RE.search(res), res)
    d.addCallback(_check4)
    # NOTE(review): 'return d' not visible in excerpt
def test_GET_DIRURL_badtype(self):
    # An unrecognized t= on a directory should produce an HTTP error.
    d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                             # NOTE(review): expected status/message and
                             # callable argument lines missing from excerpt
                             self.public_url + "/foo?t=bogus")
    # NOTE(review): 'return d' not visible in excerpt
def test_GET_DIRURL_json(self):
    """t=json on a directory returns its JSON representation."""
    d = self.GET(self.public_url + "/foo?t=json")
    d.addCallback(self.failUnlessIsFooJSON)
    # return the Deferred so trial waits for the check to complete
    return d
def test_POST_DIRURL_manifest_no_ophandle(self):
    # start-manifest is a slow operation and must be given an ophandle=
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_manifest_no_ophandle",
                         # NOTE(review): expected-status argument line
                         # missing from excerpt
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-manifest")
    # NOTE(review): 'return d' not visible in excerpt
def test_POST_DIRURL_manifest(self):
    # Exercise t=start-manifest in the html, text and JSON output forms.
    d = defer.succeed(None)
    def getman(ignored, output):
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "125")
        d.addCallback(self.get_operation_results, "125", output)
        # NOTE(review): 'return d' line missing from excerpt
    d.addCallback(getman, None)
    def _got_html(manifest):
        self.failUnless("Manifest of SI=" in manifest)
        self.failUnless("<td>sub</td>" in manifest)
        self.failUnless(self._sub_uri in manifest)
        self.failUnless("<td>sub/baz.txt</td>" in manifest)
    d.addCallback(_got_html)
    # both t=status and unadorned GET should be identical
    d.addCallback(lambda res: self.GET("/operations/125"))
    d.addCallback(_got_html)
    d.addCallback(getman, "html")
    d.addCallback(_got_html)
    d.addCallback(getman, "text")
    def _got_text(manifest):
        self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
        self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
    d.addCallback(_got_text)
    d.addCallback(getman, "JSON")
    # NOTE(review): 'def _got_json(res):' and the 'got = {}' initializer
    # lines appear to be missing from this excerpt
    data = res["manifest"]
    for (path_list, cap) in data:
        got[tuple(path_list)] = cap
    self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
    self.failUnless((u"sub",u"baz.txt") in got)
    self.failUnless("finished" in res)
    self.failUnless("origin" in res)
    self.failUnless("storage-index" in res)
    self.failUnless("verifycaps" in res)
    self.failUnless("stats" in res)
    d.addCallback(_got_json)
    # NOTE(review): 'return d' not visible in excerpt
def test_POST_DIRURL_deepsize_no_ophandle(self):
    # start-deep-size is a slow operation and must be given an ophandle=
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepsize_no_ophandle",
                         # NOTE(review): expected-status argument missing here
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-size")
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_DIRURL_deepsize(self):
    # Run a deep-size operation; check both JSON and text output forms.
    d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "126")
    d.addCallback(self.get_operation_results, "126", "json")
    def _got_json(data):
        self.failUnlessEqual(data["finished"], True)
        # NOTE(review): the line binding 'size' from data is missing
        self.failUnless(size > 1000)
    d.addCallback(_got_json)
    d.addCallback(self.get_operation_results, "126", "text")
    # NOTE(review): 'def _got_text(res):' line missing from excerpt
    mo = re.search(r'^size: (\d+)$', res, re.M)
    self.failUnless(mo, res)
    size = int(mo.group(1))
    # with directories, the size varies.
    self.failUnless(size > 1000)
    d.addCallback(_got_text)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_DIRURL_deepstats_no_ophandle(self):
    # start-deep-stats is a slow operation and must be given an ophandle=
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepstats_no_ophandle",
                         # NOTE(review): expected-status argument missing here
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-stats")
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_DIRURL_deepstats(self):
    # Run a deep-stats operation and verify the aggregate counters.
    d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "127")
    d.addCallback(self.get_operation_results, "127", "json")
    def _got_json(stats):
        expected = {"count-immutable-files": 3,
                    "count-mutable-files": 0,
                    "count-literal-files": 0,
                    # NOTE(review): at least one dict entry line missing here
                    "count-directories": 3,
                    "size-immutable-files": 57,
                    "size-literal-files": 0,
                    #"size-directories": 1912, # varies
                    #"largest-directory": 1590,
                    "largest-directory-children": 5,
                    "largest-immutable-file": 19,
        # NOTE(review): the dict's closing brace line is missing
        for k,v in expected.iteritems():
            self.failUnlessEqual(stats[k], v,
                                 "stats[%s] was %s, not %s" %
        # NOTE(review): format-argument line missing from excerpt
        self.failUnlessEqual(stats["size-files-histogram"],
        # NOTE(review): expected-histogram argument line missing
    d.addCallback(_got_json)
    # NOTE(review): 'return d' not visible in excerpt
def test_POST_DIRURL_stream_manifest(self):
    # t=stream-manifest returns newline-separated JSON units; the final
    # unit is a "stats" record.
    d = self.POST(self.public_url + "/foo/?t=stream-manifest")
    # NOTE(review): 'def _check(res):' line missing from excerpt
    self.failUnless(res.endswith("\n"))
    units = [simplejson.loads(t) for t in res[:-1].split("\n")]
    self.failUnlessEqual(len(units), 7)
    self.failUnlessEqual(units[-1]["type"], "stats")
    # NOTE(review): the line binding 'first' (presumably units[0]) missing
    self.failUnlessEqual(first["path"], [])
    self.failUnlessEqual(first["cap"], self._foo_uri)
    self.failUnlessEqual(first["type"], "directory")
    baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
    self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
    self.failIfEqual(baz["storage-index"], None)
    self.failIfEqual(baz["verifycap"], None)
    self.failIfEqual(baz["repaircap"], None)
    d.addCallback(_check)
    # NOTE(review): 'return d' not visible in excerpt
def test_GET_DIRURL_uri(self):
    """t=uri on a directory returns the directory's URI."""
    d = self.GET(self.public_url + "/foo?t=uri")
    def _check(res):
        self.failUnlessEqual(res, self._foo_uri)
    d.addCallback(_check)
    return d
def test_GET_DIRURL_readonly_uri(self):
    """t=readonly-uri on a directory returns its read-only URI."""
    d = self.GET(self.public_url + "/foo?t=readonly-uri")
    def _check(res):
        self.failUnlessEqual(res, self._foo_readonly_uri)
    d.addCallback(_check)
    return d
def test_PUT_NEWDIRURL(self):
    """PUT with t=mkdir creates a new empty directory."""
    d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits for the whole chain
    return d
def test_POST_NEWDIRURL(self):
    """POST with t=mkdir creates a new empty directory."""
    d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits for the whole chain
    return d
def test_POST_NEWDIRURL_emptyname(self):
    # an empty pathname component (i.e. a double-slash) is disallowed
    d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
                         # NOTE(review): expected-status argument missing here
                         "The webapi does not allow empty pathname components, i.e. a double slash",
                         self.POST, self.public_url + "//?t=mkdir")
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_NEWDIRURL_initial_children(self):
    # t=mkdir-with-children creates a directory pre-populated from a
    # JSON description of its children.
    (newkids, caps) = self._create_initial_children()
    d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
                   simplejson.dumps(newkids))
    # NOTE(review): the '_check' callback's def line is missing; 'uri'
    # below is presumably its argument (the new dircap)
    n = self.s.create_node_from_uri(uri.strip())
    d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"child-imm",
    # NOTE(review): caps[...] argument line missing
    d2.addCallback(lambda ign:
                   self.failUnlessRWChildURIIs(n, u"child-mutable",
    # NOTE(review): caps[...] argument line missing
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"child-mutable-ro",
    # NOTE(review): caps[...] argument line missing
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"unknownchild-ro",
                                               caps['unknown_rocap']))
    d2.addCallback(lambda ign:
                   self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
                                               caps['unknown_rwcap']))
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"unknownchild-imm",
                                               caps['unknown_immcap']))
    d2.addCallback(lambda ign:
                   self.failUnlessRWChildURIIs(n, u"dirchild",
    # NOTE(review): caps[...] argument and 'return d2' lines missing
    d.addCallback(_check)
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_NEWDIRURL_immutable(self):
    # t=mkdir-immutable builds a deep-immutable directory from JSON
    (newkids, caps) = self._create_immutable_children()
    d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
                   simplejson.dumps(newkids))
    # NOTE(review): the '_check' callback's def line is missing
    n = self.s.create_node_from_uri(uri.strip())
    d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"child-imm",
    # NOTE(review): caps[...] argument line missing
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"unknownchild-imm",
                                               caps['unknown_immcap']))
    d2.addCallback(lambda ign:
                   self.failUnlessROChildURIIs(n, u"dirchild-imm",
    # NOTE(review): caps[...] argument and 'return d2' lines missing
    d.addCallback(_check)
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
    d.addErrback(self.explain_web_error)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_NEWDIRURL_immutable_bad(self):
    # mutable children in a t=mkdir-immutable request must be rejected
    (newkids, caps) = self._create_initial_children()
    d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
                         # NOTE(review): expected-status argument missing here
                         "needed to be immutable but was not",
                         # NOTE(review): 'self.POST2,' line missing here
                         self.public_url + "/foo/newdir?t=mkdir-immutable",
                         simplejson.dumps(newkids))
    # NOTE(review): 'return d' not visible in excerpt
def test_PUT_NEWDIRURL_exists(self):
    """mkdir on an existing directory leaves it (and its children) intact."""
    d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    # return the Deferred so trial waits for the whole chain
    return d
def test_PUT_NEWDIRURL_blocked(self):
    # mkdir through a path blocked by an existing file must 409 Conflict
    d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                         "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                         # NOTE(review): the callable argument (presumably
                         # self.PUT) is missing from this excerpt
                         self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    # NOTE(review): 'return d' not visible in excerpt

def test_PUT_NEWDIRURL_mkdir_p(self):
    # t=mkdir-p&path=... creates intermediate directories
    d = defer.succeed(None)
    d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
    d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
    d.addCallback(lambda res: self._foo_node.get(u"mkp"))
    def mkdir_p(mkpnode):
        url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
        # NOTE(review): the request line (presumably 'd = self.POST(url)')
        # is missing from this excerpt
        def made_subsub(ssuri):
            d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
            d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
            # NOTE(review): a repeated-POST idempotency step appears to be
            # missing between these two callbacks
            d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            # NOTE(review): 'return d' line missing
        d.addCallback(made_subsub)
        # NOTE(review): 'return d' line missing
    d.addCallback(mkdir_p)
    # NOTE(review): 'return d' not visible in excerpt
def test_PUT_NEWDIRURL_mkdirs(self):
    """mkdir with intermediate path components creates the parents too."""
    d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
    # newdir must appear under the new subdir, not directly under foo
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
    d.addCallback(lambda res:
                  self._foo_node.get_child_at_path(u"subdir/newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits for the whole chain
    return d
def test_DELETE_DIRURL(self):
    """DELETE on a directory unlinks it from its parent."""
    d = self.DELETE(self.public_url + "/foo")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    # return the Deferred so trial waits for the check to complete
    return d
def test_DELETE_DIRURL_missing(self):
    """DELETE of a nonexistent child 404s and leaves the parent alone."""
    d = self.DELETE(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"foo"))
    # return the Deferred so trial waits for the whole chain
    return d
def test_DELETE_DIRURL_missing2(self):
    """DELETE of a nonexistent top-level directory must 404."""
    d = self.DELETE(self.public_url + "/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    # return the Deferred so trial waits for the DELETE to complete
    return d
def dump_root(self):
    # Debugging helper: walk the entire public root, visiting each child.
    # NOTE(review): a print/log line appears to be missing here
    w = webish.DirnodeWalkerMixin()
    def visitor(childpath, childnode, metadata):
        # NOTE(review): the visitor body is missing from this excerpt
    d = w.walk(self.public_root, visitor)
    # NOTE(review): 'return d' not visible in excerpt
def failUnlessNodeKeysAre(self, node, expected_keys):
    """Assert that dirnode 'node' has exactly 'expected_keys' children.

    Returns a Deferred that fires once the check has run.
    """
    for k in expected_keys:
        assert isinstance(k, unicode)
    # list() yields a dict mapping child name -> (node, metadata)
    d = node.list()
    def _check(children):
        self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
    d.addCallback(_check)
    return d
def failUnlessNodeHasChild(self, node, name):
    """Assert that dirnode 'node' has a child named 'name'.

    Returns a Deferred that fires once the check has run.
    """
    assert isinstance(name, unicode)
    d = node.list()
    def _check(children):
        self.failUnless(name in children)
    d.addCallback(_check)
    return d
def failIfNodeHasChild(self, node, name):
    """Assert that dirnode 'node' has NO child named 'name'.

    Returns a Deferred that fires once the check has run.
    """
    assert isinstance(name, unicode)
    d = node.list()
    def _check(children):
        self.failIf(name in children)
    d.addCallback(_check)
    return d
def failUnlessChildContentsAre(self, node, name, expected_contents):
    """Assert that the (immutable) child 'name' of 'node' holds
    'expected_contents'. Returns a Deferred."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: download_to_data(node))
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    # return the Deferred so callers (and trial) can wait on the check
    return d
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
    """Assert that the mutable child 'name' of 'node' currently holds
    'expected_contents'. Returns a Deferred."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_best_version())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    # return the Deferred so callers (and trial) can wait on the check
    return d
def failUnlessRWChildURIIs(self, node, name, expected_uri):
    """Assert that child 'name' is writeable and carries 'expected_uri'.

    Returns a Deferred that fires once the checks have run.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    def _check(child):
        self.failUnless(child.is_unknown() or not child.is_readonly())
        self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        self.failUnlessEqual(child.get_write_uri(), expected_uri.strip())
        expected_ro_uri = self._make_readonly(expected_uri)
        if expected_ro_uri:
            # some caps (e.g. unknown ones) may have no read-only form
            self.failUnlessEqual(child.get_readonly_uri(),
                                 expected_ro_uri.strip())
    d.addCallback(_check)
    return d
def failUnlessROChildURIIs(self, node, name, expected_uri):
    """Assert that child 'name' is read-only and carries 'expected_uri'.

    Returns a Deferred that fires once the checks have run.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    def _check(child):
        self.failUnless(child.is_unknown() or child.is_readonly())
        self.failUnlessEqual(child.get_write_uri(), None)
        self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        self.failUnlessEqual(child.get_readonly_uri(), expected_uri.strip())
    d.addCallback(_check)
    return d
def failUnlessURIMatchesRWChild(self, got_uri, node, name):
    """Assert that 'got_uri' matches the write-URI of child 'name'.

    Returns a Deferred that fires once the checks have run.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    def _check(child):
        self.failUnless(child.is_unknown() or not child.is_readonly())
        self.failUnlessEqual(child.get_uri(), got_uri.strip())
        self.failUnlessEqual(child.get_write_uri(), got_uri.strip())
        expected_ro_uri = self._make_readonly(got_uri)
        if expected_ro_uri:
            # some caps (e.g. unknown ones) may have no read-only form
            self.failUnlessEqual(child.get_readonly_uri(),
                                 expected_ro_uri.strip())
    d.addCallback(_check)
    return d
def failUnlessURIMatchesROChild(self, got_uri, node, name):
    """Assert that 'got_uri' matches the read-only URI of child 'name'.

    Returns a Deferred that fires once the checks have run.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    def _check(child):
        self.failUnless(child.is_unknown() or child.is_readonly())
        self.failUnlessEqual(child.get_write_uri(), None)
        self.failUnlessEqual(got_uri.strip(), child.get_uri())
        self.failUnlessEqual(got_uri.strip(), child.get_readonly_uri())
    d.addCallback(_check)
    return d
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake grid stores 'contents' under 'got_uri'."""
    # failUnlessEqual (rather than failUnless on ==) reports both values
    # when the assertion fails, which makes failures diagnosable
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
def test_POST_upload(self):
    # t=upload adds an immutable child to the target directory
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    # NOTE(review): the 'fn' binding (presumably self._foo_node) is missing
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_unicode(self):
    # a unicode filename must survive upload and retrieval
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=(filename, self.NEWFILE_CONTENTS))
    # NOTE(review): the 'fn' binding is missing from this excerpt
    d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,
    # NOTE(review): trailing message argument / closing parens and
    # 'return d' are missing from this excerpt

def test_POST_upload_unicode_named(self):
    # name= overrides the filename carried in the upload form field
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    d = self.POST(self.public_url + "/foo", t="upload",
                  # NOTE(review): the 'name=filename,' argument line is missing
                  file=("overridden", self.NEWFILE_CONTENTS))
    # NOTE(review): the 'fn' binding is missing from this excerpt
    d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,
    # NOTE(review): trailing message argument / closing parens and
    # 'return d' are missing from this excerpt

def test_POST_upload_no_link(self):
    # t=upload to /uri (no dirnode) returns an upload-results page
    d = self.POST("/uri", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check_upload_results(page):
        # this should be a page which describes the results of the upload
        # that just finished.
        self.failUnless("Upload Results:" in page)
        self.failUnless("URI:" in page)
        uri_re = re.compile("URI: <tt><span>(.*)</span>")
        mo = uri_re.search(page)
        self.failUnless(mo, page)
        new_uri = mo.group(1)
        # NOTE(review): 'return new_uri' line appears to be missing
    d.addCallback(_check_upload_results)
    d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_no_link_whendone(self):
    # when_done= should redirect after the upload completes
    d = self.POST("/uri", t="upload", when_done="/",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/")
    # NOTE(review): 'return d' not visible in excerpt

def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
    # Helper: expect 'callable' to raise error.PageRedirect and hand the
    # redirect's status/target to 'checker'; fail if it returned normally.
    d = defer.maybeDeferred(callable, *args, **kwargs)
    # NOTE(review): the 'def done(res):' line is missing from this excerpt
    if isinstance(res, failure.Failure):
        res.trap(error.PageRedirect)
        statuscode = res.value.status
        target = res.value.location
        return checker(statuscode, target)
    self.fail("%s: callable was supposed to redirect, not return '%s'"
    # NOTE(review): format-args line, d.addBoth(done) and 'return d'
    # are missing from this excerpt

def test_POST_upload_no_link_whendone_results(self):
    # when_done=/uri/%(uri)s interpolates the new filecap into the target
    def check(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.startswith(self.webish_url), target)
        return client.getPage(target, method="GET")
    d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                             # NOTE(review): the 'check,' argument line is missing
                             self.POST, "/uri", t="upload",
                             when_done="/uri/%(uri)s",
                             file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_no_link_mutable(self):
    # mutable=true uploads create an SSK file reachable via /uri and /file
    d = self.POST("/uri", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check(filecap):
        filecap = filecap.strip()
        self.failUnless(filecap.startswith("URI:SSK:"), filecap)
        self.filecap = filecap
        u = uri.WriteableSSKFileURI.init_from_string(filecap)
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(filecap)
        return n.download_best_version()
    d.addCallback(_check)
    # NOTE(review): 'def _check2(data):' line missing from excerpt
    self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
    return self.GET("/uri/%s" % urllib.quote(self.filecap))
    d.addCallback(_check2)
    # NOTE(review): 'def _check3(data):' line missing from excerpt
    self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
    return self.GET("/file/%s" % urllib.quote(self.filecap))
    d.addCallback(_check3)
    # NOTE(review): 'def _check4(data):' line missing from excerpt
    self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
    d.addCallback(_check4)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_no_link_mutable_toobig(self):
    # SDMF mutable uploads beyond MUTABLE_SIZELIMIT must be rejected
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_no_link_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         # NOTE(review): 'self.POST,' argument line missing
                         "/uri", t="upload", mutable="true",
                         # NOTE(review): the file=("new.txt", ... opening is missing
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    # NOTE(review): 'return d' not visible in excerpt
def test_POST_upload_mutable(self):
    # End-to-end exercise of mutable-file upload: create, re-upload via
    # POST, overwrite via PUT, then inspect listing/JSON/URI/HEAD forms.
    # this creates a mutable file
    d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    # NOTE(review): the 'fn' binding (presumably self._foo_node) is missing
    d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                         self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
    # NOTE(review): 'def _got(newnode):' line missing from excerpt
    self.failUnless(IMutableFileNode.providedBy(newnode))
    self.failUnless(newnode.is_mutable())
    self.failIf(newnode.is_readonly())
    self._mutable_node = newnode
    self._mutable_uri = newnode.get_uri()
    # NOTE(review): 'd.addCallback(_got)' line appears to be missing
    # now upload it again and make sure that the URI doesn't change
    NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
    d.addCallback(lambda res:
                  self.POST(self.public_url + "/foo", t="upload",
                            # NOTE(review): mutable="true" argument line missing
                            file=("new.txt", NEWER_CONTENTS)))
    d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
    # NOTE(review): NEWER_CONTENTS argument / closing parens missing here
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
    # NOTE(review): 'def _got2(newnode):' line missing from excerpt
    self.failUnless(IMutableFileNode.providedBy(newnode))
    self.failUnless(newnode.is_mutable())
    self.failIf(newnode.is_readonly())
    self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
    d.addCallback(_got2)
    # upload a second time, using PUT instead of POST
    NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
    # NOTE(review): NEW2_CONTENTS argument / closing parens missing here
    # finally list the directory, since mutable files are displayed
    # slightly differently
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/",
                           followRedirect=True))
    def _check_page(res):
        # TODO: assert more about the contents
        self.failUnless("SSK" in res)
        # NOTE(review): 'return res' line appears to be missing
    d.addCallback(_check_page)
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
    # NOTE(review): 'def _got3(newnode):' line missing from excerpt
    self.failUnless(IMutableFileNode.providedBy(newnode))
    self.failUnless(newnode.is_mutable())
    self.failIf(newnode.is_readonly())
    self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
    d.addCallback(_got3)
    # look at the JSON form of the enclosing directory
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/?t=json",
                           followRedirect=True))
    def _check_page_json(res):
        parsed = simplejson.loads(res)
        self.failUnlessEqual(parsed[0], "dirnode")
        children = dict( [(unicode(name),value)
                          # NOTE(review): 'for (name,value)' line missing
                          in parsed[1]["children"].iteritems()] )
        self.failUnless("new.txt" in children)
        new_json = children["new.txt"]
        self.failUnlessEqual(new_json[0], "filenode")
        self.failUnless(new_json[1]["mutable"])
        self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
    d.addCallback(_check_page_json)
    # and the JSON form of the file
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=json"))
    def _check_file_json(res):
        parsed = simplejson.loads(res)
        self.failUnlessEqual(parsed[0], "filenode")
        self.failUnless(parsed[1]["mutable"])
        self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
    d.addCallback(_check_file_json)
    # and look at t=uri and t=readonly-uri
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=uri"))
    d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
    def _check_ro_uri(res):
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(res, ro_uri)
    d.addCallback(_check_ro_uri)
    # make sure we can get to it from /uri/URI
    d.addCallback(lambda res:
                  self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, NEW2_CONTENTS))
    # and that HEAD computes the size correctly
    d.addCallback(lambda res:
                  self.HEAD(self.public_url + "/foo/new.txt",
                            return_response=True))
    def _got_headers((res, status, headers)):
        self.failUnlessEqual(res, "")
        self.failUnlessEqual(headers["content-length"][0],
                             str(len(NEW2_CONTENTS)))
        self.failUnlessEqual(headers["content-type"], ["text/plain"])
    d.addCallback(_got_headers)
    # make sure that size errors are displayed correctly for overwrite
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error,
                                   "test_POST_upload_mutable-toobig",
                                   "413 Request Entity Too Large",
                                   "SDMF is limited to one segment, and 10001 > 10000",
                                   # NOTE(review): 'self.POST,' line missing
                                   self.public_url + "/foo", t="upload",
                                   # NOTE(review): mutable= and file=(... lines missing
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)),
    # NOTE(review): closing parens line missing from excerpt
    d.addErrback(self.dump_error)
    # NOTE(review): 'return d' not visible in excerpt
def test_POST_upload_mutable_toobig(self):
    # over-SDMF-limit mutable uploads into a directory must 413
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         # NOTE(review): 'self.POST,' argument line missing
                         self.public_url + "/foo",
                         t="upload", mutable="true",
                         # NOTE(review): the file=("new.txt", ... opening is missing
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    # NOTE(review): 'return d' not visible in excerpt

def dump_error(self, f):
    # if the web server returns an error code (like 400 Bad Request),
    # web.client.getPage puts the HTTP response body into the .response
    # attribute of the exception object that it gives back. It does not
    # appear in the Failure's repr(), so the ERROR that trial displays
    # will be rather terse and unhelpful. addErrback this method to the
    # end of your chain to get more information out of these errors.
    if f.check(error.Error):
        print "web.error.Error:"
        # NOTE(review): a 'print f.value' line appears to be missing here
        print f.value.response
    # NOTE(review): 'return f' (to re-propagate the failure) not visible

def test_POST_upload_replace(self):
    # uploading over an existing child replaces its contents
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    # NOTE(review): the 'fn' binding (presumably self._foo_node) is missing
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_no_replace_ok(self):
    # replace=false is fine when no child of that name exists yet
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessEqual(res,
                                                   self.NEWFILE_CONTENTS))
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_no_replace_queryarg(self):
    # replace=false with an existing child must fail and leave it intact
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error,
              "POST_upload_no_replace_queryarg",
              # NOTE(review): expected-status argument line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_no_replace_field(self):
    # replace=false as a form field behaves like the query argument
    d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
              # NOTE(review): expected-status argument line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_whendone(self):
    # when_done= redirects after a directory upload, and the child lands
    d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/THERE")
    # NOTE(review): the 'fn' binding (presumably self._foo_node) is missing
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_named(self):
    # name= supplies the child name when file= has no filename
    # NOTE(review): the 'fn' binding (presumably self._foo_node) is missing
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="new.txt", file=self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_upload_named_badfilename(self):
    # a name= containing a slash must be rejected and add nothing
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_upload_named_badfilename",
              # NOTE(review): expected-status argument line missing here
              "name= may not contain a slash",
    # NOTE(review): closing paren line missing from excerpt
    # make sure that nothing was added
    d.addCallback(lambda res:
                  self.failUnlessNodeKeysAre(self._foo_node,
                                             [u"bar.txt", u"blockingfile",
                                              u"empty", u"n\u00fc.txt",
    # NOTE(review): remaining list items, closing parens and 'return d'
    # are missing from this excerpt

def test_POST_FILEURL_check(self):
    # t=check on a file: html output, when_done redirect, return_to
    # link, and JSON output
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check")
    # NOTE(review): 'def _check(res):' line missing from excerpt
    self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check",
                                       # NOTE(review): _check2 / self.POST /
                                       # bar_url / t="check" argument lines missing
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    # NOTE(review): 'def _check3(res):' line missing from excerpt
    self.failUnless("Healthy :" in res)
    self.failUnless("Return to file" in res)
    self.failUnless(redir_url in res)
    d.addCallback(_check3)
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_FILEURL_check_and_repair(self):
    # t=check&repair=true on a healthy file: same flows as plain check
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check", repair="true")
    # NOTE(review): 'def _check(res):' line missing from excerpt
    self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                       # NOTE(review): _check2 / self.POST /
                                       # bar_url argument lines missing
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    # NOTE(review): 'def _check3(res):' line missing from excerpt
    self.failUnless("Healthy :" in res)
    self.failUnless("Return to file" in res)
    self.failUnless(redir_url in res)
    d.addCallback(_check3)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_DIRURL_check(self):
    # t=check on a directory: html, redirect, return_to and JSON outputs
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check")
    # NOTE(review): 'def _check(res):' line missing from excerpt
    self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check",
                                       # NOTE(review): _check2 / self.POST /
                                       # foo_url / t="check" argument lines missing
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    # NOTE(review): 'def _check3(res):' line missing from excerpt
    self.failUnless("Healthy :" in res, res)
    self.failUnless("Return to file/directory" in res)
    self.failUnless(redir_url in res)
    d.addCallback(_check3)
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
    # NOTE(review): 'return d' not visible in excerpt

def test_POST_DIRURL_check_and_repair(self):
    # t=check&repair=true on a healthy directory
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check", repair="true")
    # NOTE(review): 'def _check(res):' line missing from excerpt
    self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                       # NOTE(review): _check2 / self.POST /
                                       # foo_url argument lines missing
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    # NOTE(review): 'def _check3(res):' line missing from excerpt
    self.failUnless("Healthy :" in res)
    self.failUnless("Return to file/directory" in res)
    self.failUnless(redir_url in res)
    d.addCallback(_check3)
    # NOTE(review): 'return d' not visible in excerpt
1920 def wait_for_operation(self, ignored, ophandle):
1921 url = "/operations/" + ophandle
1922 url += "?t=status&output=JSON"
1925 data = simplejson.loads(res)
1926 if not data["finished"]:
1927 d = self.stall(delay=1.0)
1928 d.addCallback(self.wait_for_operation, ophandle)
1934 def get_operation_results(self, ignored, ophandle, output=None):
1935 url = "/operations/" + ophandle
1938 url += "&output=" + output
1941 if output and output.lower() == "json":
1942 return simplejson.loads(res)
1947 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1948 d = self.shouldFail2(error.Error,
1949 "test_POST_DIRURL_deepcheck_no_ophandle",
1951 "slow operation requires ophandle=",
1952 self.POST, self.public_url, t="start-deep-check")
1955 def test_POST_DIRURL_deepcheck(self):
1956 def _check_redirect(statuscode, target):
1957 self.failUnlessEqual(statuscode, str(http.FOUND))
1958 self.failUnless(target.endswith("/operations/123"))
1959 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1960 self.POST, self.public_url,
1961 t="start-deep-check", ophandle="123")
1962 d.addCallback(self.wait_for_operation, "123")
1963 def _check_json(data):
1964 self.failUnlessEqual(data["finished"], True)
1965 self.failUnlessEqual(data["count-objects-checked"], 8)
1966 self.failUnlessEqual(data["count-objects-healthy"], 8)
1967 d.addCallback(_check_json)
1968 d.addCallback(self.get_operation_results, "123", "html")
1969 def _check_html(res):
1970 self.failUnless("Objects Checked: <span>8</span>" in res)
1971 self.failUnless("Objects Healthy: <span>8</span>" in res)
1972 d.addCallback(_check_html)
1974 d.addCallback(lambda res:
1975 self.GET("/operations/123/"))
1976 d.addCallback(_check_html) # should be the same as without the slash
1978 d.addCallback(lambda res:
1979 self.shouldFail2(error.Error, "one", "404 Not Found",
1980 "No detailed results for SI bogus",
1981 self.GET, "/operations/123/bogus"))
1983 foo_si = self._foo_node.get_storage_index()
1984 foo_si_s = base32.b2a(foo_si)
1985 d.addCallback(lambda res:
1986 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1987 def _check_foo_json(res):
1988 data = simplejson.loads(res)
1989 self.failUnlessEqual(data["storage-index"], foo_si_s)
1990 self.failUnless(data["results"]["healthy"])
1991 d.addCallback(_check_foo_json)
1994 def test_POST_DIRURL_deepcheck_and_repair(self):
1995 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1996 ophandle="124", output="json", followRedirect=True)
1997 d.addCallback(self.wait_for_operation, "124")
1998 def _check_json(data):
1999 self.failUnlessEqual(data["finished"], True)
2000 self.failUnlessEqual(data["count-objects-checked"], 8)
2001 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
2002 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
2003 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
2004 self.failUnlessEqual(data["count-repairs-attempted"], 0)
2005 self.failUnlessEqual(data["count-repairs-successful"], 0)
2006 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
2007 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
2008 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
2009 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
2010 d.addCallback(_check_json)
2011 d.addCallback(self.get_operation_results, "124", "html")
2012 def _check_html(res):
2013 self.failUnless("Objects Checked: <span>8</span>" in res)
2015 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2016 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2017 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2019 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2020 self.failUnless("Repairs Successful: <span>0</span>" in res)
2021 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2023 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2024 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2025 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2026 d.addCallback(_check_html)
2029 def test_POST_FILEURL_bad_t(self):
2030 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2031 "POST to file: bad t=bogus",
2032 self.POST, self.public_url + "/foo/bar.txt",
2036 def test_POST_mkdir(self): # return value?
2037 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2038 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2039 d.addCallback(self.failUnlessNodeKeysAre, [])
2042 def test_POST_mkdir_initial_children(self):
2043 (newkids, caps) = self._create_initial_children()
2044 d = self.POST2(self.public_url +
2045 "/foo?t=mkdir-with-children&name=newdir",
2046 simplejson.dumps(newkids))
2047 d.addCallback(lambda res:
2048 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2049 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2050 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2051 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2052 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2055 def test_POST_mkdir_immutable(self):
2056 (newkids, caps) = self._create_immutable_children()
2057 d = self.POST2(self.public_url +
2058 "/foo?t=mkdir-immutable&name=newdir",
2059 simplejson.dumps(newkids))
2060 d.addCallback(lambda res:
2061 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2062 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2063 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2064 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2065 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2066 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2067 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2068 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2069 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2072 def test_POST_mkdir_immutable_bad(self):
2073 (newkids, caps) = self._create_initial_children()
2074 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2076 "needed to be immutable but was not",
2079 "/foo?t=mkdir-immutable&name=newdir",
2080 simplejson.dumps(newkids))
2083 def test_POST_mkdir_2(self):
2084 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2085 d.addCallback(lambda res:
2086 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2087 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2088 d.addCallback(self.failUnlessNodeKeysAre, [])
2091 def test_POST_mkdirs_2(self):
2092 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2093 d.addCallback(lambda res:
2094 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2095 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2096 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2097 d.addCallback(self.failUnlessNodeKeysAre, [])
2100 def test_POST_mkdir_no_parentdir_noredirect(self):
2101 d = self.POST("/uri?t=mkdir")
2102 def _after_mkdir(res):
2103 uri.DirectoryURI.init_from_string(res)
2104 d.addCallback(_after_mkdir)
2107 def test_POST_mkdir_no_parentdir_noredirect2(self):
2108 # make sure form-based arguments (as on the welcome page) still work
2109 d = self.POST("/uri", t="mkdir")
2110 def _after_mkdir(res):
2111 uri.DirectoryURI.init_from_string(res)
2112 d.addCallback(_after_mkdir)
2113 d.addErrback(self.explain_web_error)
2116 def test_POST_mkdir_no_parentdir_redirect(self):
2117 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2118 d.addBoth(self.shouldRedirect, None, statuscode='303')
2119 def _check_target(target):
2120 target = urllib.unquote(target)
2121 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2122 d.addCallback(_check_target)
2125 def test_POST_mkdir_no_parentdir_redirect2(self):
2126 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2127 d.addBoth(self.shouldRedirect, None, statuscode='303')
2128 def _check_target(target):
2129 target = urllib.unquote(target)
2130 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2131 d.addCallback(_check_target)
2132 d.addErrback(self.explain_web_error)
2135 def _make_readonly(self, u):
2136 ro_uri = uri.from_string(u).get_readonly()
2139 return ro_uri.to_string()
2141 def _create_initial_children(self):
2142 contents, n, filecap1 = self.makefile(12)
2143 md1 = {"metakey1": "metavalue1"}
2144 filecap2 = make_mutable_file_uri()
2145 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2146 filecap3 = node3.get_readonly_uri()
2147 unknown_rwcap = "lafs://from_the_future"
2148 unknown_rocap = "ro.lafs://readonly_from_the_future"
2149 unknown_immcap = "imm.lafs://immutable_from_the_future"
2150 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2151 dircap = DirectoryNode(node4, None, None).get_uri()
2152 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2153 "ro_uri": self._make_readonly(filecap1),
2154 "metadata": md1, }],
2155 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2156 "ro_uri": self._make_readonly(filecap2)}],
2157 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2158 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2159 "ro_uri": unknown_rocap}],
2160 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2161 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2162 u"dirchild": ["dirnode", {"rw_uri": dircap,
2163 "ro_uri": self._make_readonly(dircap)}],
2165 return newkids, {'filecap1': filecap1,
2166 'filecap2': filecap2,
2167 'filecap3': filecap3,
2168 'unknown_rwcap': unknown_rwcap,
2169 'unknown_rocap': unknown_rocap,
2170 'unknown_immcap': unknown_immcap,
2173 def _create_immutable_children(self):
2174 contents, n, filecap1 = self.makefile(12)
2175 md1 = {"metakey1": "metavalue1"}
2176 tnode = create_chk_filenode("immutable directory contents\n"*10)
2177 dnode = DirectoryNode(tnode, None, None)
2178 assert not dnode.is_mutable()
2179 unknown_immcap = "imm.lafs://immutable_from_the_future"
2180 immdircap = dnode.get_uri()
2181 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2182 "metadata": md1, }],
2183 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2184 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2186 return newkids, {'filecap1': filecap1,
2187 'unknown_immcap': unknown_immcap,
2188 'immdircap': immdircap}
2190 def test_POST_mkdir_no_parentdir_initial_children(self):
2191 (newkids, caps) = self._create_initial_children()
2192 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2193 def _after_mkdir(res):
2194 self.failUnless(res.startswith("URI:DIR"), res)
2195 n = self.s.create_node_from_uri(res)
2196 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2197 d2.addCallback(lambda ign:
2198 self.failUnlessROChildURIIs(n, u"child-imm",
2200 d2.addCallback(lambda ign:
2201 self.failUnlessRWChildURIIs(n, u"child-mutable",
2203 d2.addCallback(lambda ign:
2204 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2206 d2.addCallback(lambda ign:
2207 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2208 caps['unknown_rwcap']))
2209 d2.addCallback(lambda ign:
2210 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2211 caps['unknown_rocap']))
2212 d2.addCallback(lambda ign:
2213 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2214 caps['unknown_immcap']))
2215 d2.addCallback(lambda ign:
2216 self.failUnlessRWChildURIIs(n, u"dirchild",
2219 d.addCallback(_after_mkdir)
2222 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2223 # the regular /uri?t=mkdir operation is specified to ignore its body.
2224 # Only t=mkdir-with-children pays attention to it.
2225 (newkids, caps) = self._create_initial_children()
2226 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2228 "t=mkdir does not accept children=, "
2229 "try t=mkdir-with-children instead",
2230 self.POST2, "/uri?t=mkdir", # without children
2231 simplejson.dumps(newkids))
2234 def test_POST_noparent_bad(self):
2235 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2236 "/uri accepts only PUT, PUT?t=mkdir, "
2237 "POST?t=upload, and POST?t=mkdir",
2238 self.POST, "/uri?t=bogus")
2241 def test_POST_mkdir_no_parentdir_immutable(self):
2242 (newkids, caps) = self._create_immutable_children()
2243 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2244 def _after_mkdir(res):
2245 self.failUnless(res.startswith("URI:DIR"), res)
2246 n = self.s.create_node_from_uri(res)
2247 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2248 d2.addCallback(lambda ign:
2249 self.failUnlessROChildURIIs(n, u"child-imm",
2251 d2.addCallback(lambda ign:
2252 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2253 caps['unknown_immcap']))
2254 d2.addCallback(lambda ign:
2255 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2258 d.addCallback(_after_mkdir)
2261 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2262 (newkids, caps) = self._create_initial_children()
2263 d = self.shouldFail2(error.Error,
2264 "test_POST_mkdir_no_parentdir_immutable_bad",
2266 "needed to be immutable but was not",
2268 "/uri?t=mkdir-immutable",
2269 simplejson.dumps(newkids))
2272 def test_welcome_page_mkdir_button(self):
2273 # Fetch the welcome page.
2275 def _after_get_welcome_page(res):
2276 MKDIR_BUTTON_RE = re.compile(
2277 '<form action="([^"]*)" method="post".*?'
2278 '<input type="hidden" name="t" value="([^"]*)" />'
2279 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2280 '<input type="submit" value="Create a directory" />',
2282 mo = MKDIR_BUTTON_RE.search(res)
2283 formaction = mo.group(1)
2285 formaname = mo.group(3)
2286 formavalue = mo.group(4)
2287 return (formaction, formt, formaname, formavalue)
2288 d.addCallback(_after_get_welcome_page)
2289 def _after_parse_form(res):
2290 (formaction, formt, formaname, formavalue) = res
2291 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2292 d.addCallback(_after_parse_form)
2293 d.addBoth(self.shouldRedirect, None, statuscode='303')
2296 def test_POST_mkdir_replace(self): # return value?
2297 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2298 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2299 d.addCallback(self.failUnlessNodeKeysAre, [])
2302 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2303 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2304 d.addBoth(self.shouldFail, error.Error,
2305 "POST_mkdir_no_replace_queryarg",
2307 "There was already a child by that name, and you asked me "
2308 "to not replace it")
2309 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2310 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2313 def test_POST_mkdir_no_replace_field(self): # return value?
2314 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2316 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2318 "There was already a child by that name, and you asked me "
2319 "to not replace it")
2320 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2321 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2324 def test_POST_mkdir_whendone_field(self):
2325 d = self.POST(self.public_url + "/foo",
2326 t="mkdir", name="newdir", when_done="/THERE")
2327 d.addBoth(self.shouldRedirect, "/THERE")
2328 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2329 d.addCallback(self.failUnlessNodeKeysAre, [])
2332 def test_POST_mkdir_whendone_queryarg(self):
2333 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2334 t="mkdir", name="newdir")
2335 d.addBoth(self.shouldRedirect, "/THERE")
2336 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2337 d.addCallback(self.failUnlessNodeKeysAre, [])
2340 def test_POST_bad_t(self):
2341 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2342 "POST to a directory with bad t=BOGUS",
2343 self.POST, self.public_url + "/foo", t="BOGUS")
2346 def test_POST_set_children(self, command_name="set_children"):
2347 contents9, n9, newuri9 = self.makefile(9)
2348 contents10, n10, newuri10 = self.makefile(10)
2349 contents11, n11, newuri11 = self.makefile(11)
2352 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2355 "ctime": 1002777696.7564139,
2356 "mtime": 1002777696.7564139
2359 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2362 "ctime": 1002777696.7564139,
2363 "mtime": 1002777696.7564139
2366 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2369 "ctime": 1002777696.7564139,
2370 "mtime": 1002777696.7564139
2373 }""" % (newuri9, newuri10, newuri11)
2375 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2377 d = client.getPage(url, method="POST", postdata=reqbody)
2379 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2380 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2381 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2383 d.addCallback(_then)
2384 d.addErrback(self.dump_error)
2387 def test_POST_set_children_with_hyphen(self):
2388 return self.test_POST_set_children(command_name="set-children")
2390 def test_POST_link_uri(self):
2391 contents, n, newuri = self.makefile(8)
2392 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2393 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2394 d.addCallback(lambda res:
2395 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2399 def test_POST_link_uri_replace(self):
2400 contents, n, newuri = self.makefile(8)
2401 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2402 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2403 d.addCallback(lambda res:
2404 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2408 def test_POST_link_uri_unknown_bad(self):
2409 newuri = "lafs://from_the_future"
2410 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=newuri)
2411 d.addBoth(self.shouldFail, error.Error,
2412 "POST_link_uri_unknown_bad",
2414 "unknown cap in a write slot")
2417 def test_POST_link_uri_unknown_ro_good(self):
2418 newuri = "ro.lafs://readonly_from_the_future"
2419 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=newuri)
2420 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
2423 def test_POST_link_uri_unknown_imm_good(self):
2424 newuri = "imm.lafs://immutable_from_the_future"
2425 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=newuri)
2426 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
2429 def test_POST_link_uri_no_replace_queryarg(self):
2430 contents, n, newuri = self.makefile(8)
2431 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2432 name="bar.txt", uri=newuri)
2433 d.addBoth(self.shouldFail, error.Error,
2434 "POST_link_uri_no_replace_queryarg",
2436 "There was already a child by that name, and you asked me "
2437 "to not replace it")
2438 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2439 d.addCallback(self.failUnlessIsBarDotTxt)
2442 def test_POST_link_uri_no_replace_field(self):
2443 contents, n, newuri = self.makefile(8)
2444 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2445 name="bar.txt", uri=newuri)
2446 d.addBoth(self.shouldFail, error.Error,
2447 "POST_link_uri_no_replace_field",
2449 "There was already a child by that name, and you asked me "
2450 "to not replace it")
2451 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2452 d.addCallback(self.failUnlessIsBarDotTxt)
2455 def test_POST_delete(self):
2456 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2457 d.addCallback(lambda res: self._foo_node.list())
2458 def _check(children):
2459 self.failIf(u"bar.txt" in children)
2460 d.addCallback(_check)
2463 def test_POST_rename_file(self):
2464 d = self.POST(self.public_url + "/foo", t="rename",
2465 from_name="bar.txt", to_name='wibble.txt')
2466 d.addCallback(lambda res:
2467 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2468 d.addCallback(lambda res:
2469 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2470 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2471 d.addCallback(self.failUnlessIsBarDotTxt)
2472 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2473 d.addCallback(self.failUnlessIsBarJSON)
2476 def test_POST_rename_file_redundant(self):
2477 d = self.POST(self.public_url + "/foo", t="rename",
2478 from_name="bar.txt", to_name='bar.txt')
2479 d.addCallback(lambda res:
2480 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2481 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2482 d.addCallback(self.failUnlessIsBarDotTxt)
2483 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2484 d.addCallback(self.failUnlessIsBarJSON)
2487 def test_POST_rename_file_replace(self):
2488 # rename a file and replace a directory with it
2489 d = self.POST(self.public_url + "/foo", t="rename",
2490 from_name="bar.txt", to_name='empty')
2491 d.addCallback(lambda res:
2492 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2493 d.addCallback(lambda res:
2494 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2495 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2496 d.addCallback(self.failUnlessIsBarDotTxt)
2497 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2498 d.addCallback(self.failUnlessIsBarJSON)
2501 def test_POST_rename_file_no_replace_queryarg(self):
2502 # rename a file and replace a directory with it
2503 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2504 from_name="bar.txt", to_name='empty')
2505 d.addBoth(self.shouldFail, error.Error,
2506 "POST_rename_file_no_replace_queryarg",
2508 "There was already a child by that name, and you asked me "
2509 "to not replace it")
2510 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2511 d.addCallback(self.failUnlessIsEmptyJSON)
2514 def test_POST_rename_file_no_replace_field(self):
2515 # rename a file and replace a directory with it
2516 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2517 from_name="bar.txt", to_name='empty')
2518 d.addBoth(self.shouldFail, error.Error,
2519 "POST_rename_file_no_replace_field",
2521 "There was already a child by that name, and you asked me "
2522 "to not replace it")
2523 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2524 d.addCallback(self.failUnlessIsEmptyJSON)
2527 def failUnlessIsEmptyJSON(self, res):
2528 data = simplejson.loads(res)
2529 self.failUnlessEqual(data[0], "dirnode", data)
2530 self.failUnlessEqual(len(data[1]["children"]), 0)
2532 def test_POST_rename_file_slash_fail(self):
2533 d = self.POST(self.public_url + "/foo", t="rename",
2534 from_name="bar.txt", to_name='kirk/spock.txt')
2535 d.addBoth(self.shouldFail, error.Error,
2536 "test_POST_rename_file_slash_fail",
2538 "to_name= may not contain a slash",
2540 d.addCallback(lambda res:
2541 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2544 def test_POST_rename_dir(self):
2545 d = self.POST(self.public_url, t="rename",
2546 from_name="foo", to_name='plunk')
2547 d.addCallback(lambda res:
2548 self.failIfNodeHasChild(self.public_root, u"foo"))
2549 d.addCallback(lambda res:
2550 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2551 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2552 d.addCallback(self.failUnlessIsFooJSON)
2555 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2556 """ If target is not None then the redirection has to go to target. If
2557 statuscode is not None then the redirection has to be accomplished with
2558 that HTTP status code."""
2559 if not isinstance(res, failure.Failure):
2560 to_where = (target is None) and "somewhere" or ("to " + target)
2561 self.fail("%s: we were expecting to get redirected %s, not get an"
2562 " actual page: %s" % (which, to_where, res))
2563 res.trap(error.PageRedirect)
2564 if statuscode is not None:
2565 self.failUnlessEqual(res.value.status, statuscode,
2566 "%s: not a redirect" % which)
2567 if target is not None:
2568 # the PageRedirect does not seem to capture the uri= query arg
2569 # properly, so we can't check for it.
2570 realtarget = self.webish_url + target
2571 self.failUnlessEqual(res.value.location, realtarget,
2572 "%s: wrong target" % which)
2573 return res.value.location
2575 def test_GET_URI_form(self):
2576 base = "/uri?uri=%s" % self._bar_txt_uri
2577 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2578 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2580 d.addBoth(self.shouldRedirect, targetbase)
2581 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2582 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2583 d.addCallback(lambda res: self.GET(base+"&t=json"))
2584 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2585 d.addCallback(self.log, "about to get file by uri")
2586 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2587 d.addCallback(self.failUnlessIsBarDotTxt)
2588 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2589 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2590 followRedirect=True))
2591 d.addCallback(self.failUnlessIsFooJSON)
2592 d.addCallback(self.log, "got dir by uri")
2596 def test_GET_URI_form_bad(self):
2597 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2598 "400 Bad Request", "GET /uri requires uri=",
2602 def test_GET_rename_form(self):
2603 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2604 followRedirect=True)
2606 self.failUnless('name="when_done" value="."' in res, res)
2607 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2608 d.addCallback(_check)
2611 def log(self, res, msg):
2612 #print "MSG: %s RES: %s" % (msg, res)
2616 def test_GET_URI_URL(self):
2617 base = "/uri/%s" % self._bar_txt_uri
2619 d.addCallback(self.failUnlessIsBarDotTxt)
2620 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2621 d.addCallback(self.failUnlessIsBarDotTxt)
2622 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2623 d.addCallback(self.failUnlessIsBarDotTxt)
2626 def test_GET_URI_URL_dir(self):
2627 base = "/uri/%s?t=json" % self._foo_uri
2629 d.addCallback(self.failUnlessIsFooJSON)
2632 def test_GET_URI_URL_missing(self):
2633 base = "/uri/%s" % self._bad_file_uri
2634 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2635 http.GONE, None, "NotEnoughSharesError",
2637 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2638 # here? we must arrange for a download to fail after target.open()
2639 # has been called, and then inspect the response to see that it is
2640 # shorter than we expected.
2643 def test_PUT_DIRURL_uri(self):
2644 d = self.s.create_dirnode()
2646 new_uri = dn.get_uri()
2647 # replace /foo with a new (empty) directory
2648 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2649 d.addCallback(lambda res:
2650 self.failUnlessEqual(res.strip(), new_uri))
2651 d.addCallback(lambda res:
2652 self.failUnlessRWChildURIIs(self.public_root,
2656 d.addCallback(_made_dir)
2659 def test_PUT_DIRURL_uri_noreplace(self):
2660 d = self.s.create_dirnode()
2662 new_uri = dn.get_uri()
2663 # replace /foo with a new (empty) directory, but ask that
2664 # replace=false, so it should fail
2665 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2666 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2668 self.public_url + "/foo?t=uri&replace=false",
2670 d.addCallback(lambda res:
2671 self.failUnlessRWChildURIIs(self.public_root,
2675 d.addCallback(_made_dir)
2678 def test_PUT_DIRURL_bad_t(self):
2679 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2680 "400 Bad Request", "PUT to a directory",
2681 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2682 d.addCallback(lambda res:
2683 self.failUnlessRWChildURIIs(self.public_root,
2688 def test_PUT_NEWFILEURL_uri(self):
2689 contents, n, new_uri = self.makefile(8)
2690 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2691 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2692 d.addCallback(lambda res:
2693 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2697 def test_PUT_NEWFILEURL_uri_replace(self):
2698 contents, n, new_uri = self.makefile(8)
2699 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2700 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2701 d.addCallback(lambda res:
2702 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2706 def test_PUT_NEWFILEURL_uri_no_replace(self):
2707 contents, n, new_uri = self.makefile(8)
2708 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2709 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2711 "There was already a child by that name, and you asked me "
2712 "to not replace it")
2715 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
2716 new_uri = "lafs://from_the_future"
2717 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", new_uri)
2718 d.addBoth(self.shouldFail, error.Error,
2719 "POST_put_uri_unknown_bad",
2721 "unknown cap in a write slot")
2724 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
2725 new_uri = "ro.lafs://readonly_from_the_future"
2726 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", new_uri)
2727 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2728 u"put-future-ro.txt")
2731 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
2732 new_uri = "imm.lafs://immutable_from_the_future"
2733 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", new_uri)
2734 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2735 u"put-future-imm.txt")
def test_PUT_NEWFILE_URI(self):
    """PUT to /uri uploads the body as a new file and returns its cap."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri", file_contents)
    # callback body; its 'def _check(uri):' header is elided in this extract
    assert isinstance(uri, str), uri
    self.failUnless(uri in FakeCHKFileNode.all_contents)
    self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
    # NOTE(review): continuation line elided; presumably compares to file_contents
    return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    # _check2 body (header elided); downloading the cap returns the contents
    self.failUnlessEqual(res, file_contents)
    d.addCallback(_check2)
def test_PUT_NEWFILE_URI_not_mutable(self):
    """PUT /uri?mutable=false behaves like a plain immutable upload."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=false", file_contents)
    # callback body; its 'def _check(uri):' header is elided in this extract
    assert isinstance(uri, str), uri
    self.failUnless(uri in FakeCHKFileNode.all_contents)
    self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
    # NOTE(review): continuation line elided; presumably compares to file_contents
    return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    # _check2 body (header elided)
    self.failUnlessEqual(res, file_contents)
    d.addCallback(_check2)
def test_PUT_NEWFILE_URI_only_PUT(self):
    """/uri rejects unsupported t= values with an explanatory error."""
    d = self.PUT("/uri?t=bogus", "")
    d.addBoth(self.shouldFail, error.Error,
              "PUT_NEWFILE_URI_only_PUT",
              # NOTE(review): one argument line is elided in this extract
              "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
def test_PUT_NEWFILE_URI_mutable(self):
    """PUT /uri?mutable=true creates an SSK mutable file holding the body."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=true", file_contents)
    def _check1(filecap):
        filecap = filecap.strip()
        # a mutable upload must return a writecap, not a CHK readcap
        self.failUnless(filecap.startswith("URI:SSK:"), filecap)
        self.filecap = filecap
        u = uri.WriteableSSKFileURI.init_from_string(filecap)
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(filecap)
        return n.download_best_version()
    d.addCallback(_check1)
    # _check2 body ('def _check2(data):' header elided in this extract)
    self.failUnlessEqual(data, file_contents)
    return self.GET("/uri/%s" % urllib.quote(self.filecap))
    d.addCallback(_check2)
    # _check3 body (header elided): the GET of the writecap returns the contents
    self.failUnlessEqual(res, file_contents)
    d.addCallback(_check3)
def test_PUT_mkdir(self):
    """PUT /uri?t=mkdir creates an empty directory and returns its cap."""
    d = self.PUT("/uri?t=mkdir", "")
    # callback body; its 'def _check(uri):' header is elided in this extract
    n = self.s.create_node_from_uri(uri.strip())
    d2 = self.failUnlessNodeKeysAre(n, [])
    d2.addCallback(lambda res:
                   self.GET("/uri/%s?t=json" % uri))
    d.addCallback(_check)
    d.addCallback(self.failUnlessIsEmptyJSON)
def test_POST_check(self):
    """POST t=check on a child file triggers a checker run."""
    d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
    # _done body (header elided in this extract)
    # this returns a string form of the results, which are probably
    # None since we're using fake filenodes.
    # TODO: verify that the check actually happened, by changing
    # FakeCHKFileNode to count how many times .check() has been
    d.addCallback(_done)
def test_bad_method(self):
    """Unsupported HTTP verbs on a file URL yield 501 Not Implemented."""
    url = self.webish_url + self.public_url + "/foo/bar.txt"
    d = self.shouldHTTPError("test_bad_method",
                             501, "Not Implemented",
                             "I don't know how to treat a BOGUS request.",
                             client.getPage, url, method="BOGUS")
def test_short_url(self):
    """DELETE on the bare /uri (no cap) is not implemented."""
    url = self.webish_url + "/uri"
    d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                             "I don't know how to treat a DELETE request.",
                             client.getPage, url, method="DELETE")
def test_ophandle_bad(self):
    """Status queries for a nonexistent ophandle return 404."""
    url = self.webish_url + "/operations/bogus?t=status"
    d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                             "unknown/expired handle 'bogus'",
                             client.getPage, url)
def test_ophandle_cancel(self):
    """t=cancel aborts a running operation and forgets its handle."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/128?t=status&output=JSON"))
    # _check1 body ('def _check1(res):' header elided in this extract)
    data = simplejson.loads(res)
    self.failUnless("finished" in data, res)
    # grab the Monitor so we can verify cancellation after the handle is gone
    monitor = self.ws.root.child_operations.handles["128"][0]
    d = self.POST("/operations/128?t=cancel&output=JSON")
    # _check2 body (header elided)
    data = simplejson.loads(res)
    self.failUnless("finished" in data, res)
    # t=cancel causes the handle to be forgotten
    self.failUnless(monitor.is_cancelled())
    d.addCallback(_check2)
    d.addCallback(_check1)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_cancel",
                                       404, "404 Not Found",
                                       "unknown/expired handle '128'",
                                       # NOTE(review): 'self.GET,' line appears elided here
                                       "/operations/128?t=status&output=JSON"))
def test_ophandle_retainfor(self):
    """retain-for=0 causes the handle to expire almost immediately."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
    # _check1 body ('def _check1(res):' header elided in this extract)
    data = simplejson.loads(res)
    self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # the retain-for=0 will cause the handle to be expired very soon
    d.addCallback(lambda ign:
                  self.clock.advance(2.0))
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_retainfor",
                                       404, "404 Not Found",
                                       "unknown/expired handle '129'",
                                       # NOTE(review): 'self.GET,' line appears elided here
                                       "/operations/129?t=status&output=JSON"))
def test_ophandle_release_after_complete(self):
    """release-after-complete=true drops the handle once it is collected."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "130")
    d.addCallback(lambda ignored:
                  self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
    # the release-after-complete=true will cause the handle to be expired
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_release_after_complete",
                                       404, "404 Not Found",
                                       "unknown/expired handle '130'",
                                       # NOTE(review): 'self.GET,' line appears elided here
                                       "/operations/130?t=status&output=JSON"))
def test_uncollected_ophandle_expiration(self):
    # uncollected ophandles should expire after 4 days
    def _make_uncollected_ophandle(ophandle):
        d = self.POST(self.public_url +
                      "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                      followRedirect=False)
        # When we start the operation, the webapi server will want
        # to redirect us to the page for the ophandle, so we get
        # confirmation that the operation has started. If the
        # manifest operation has finished by the time we get there,
        # following that redirect (by setting followRedirect=True
        # above) has the side effect of collecting the ophandle that
        # we've just created, which means that we can't use the
        # ophandle to test the uncollected timeout anymore. So,
        # instead, catch the 302 here and don't follow it.
        d.addBoth(self.should302, "uncollected_ophandle_creation")
        # NOTE(review): 'return d' appears elided in this extract
    # Create an ophandle, don't collect it, then advance the clock by
    # 4 days - 1 second and make sure that the ophandle is still there.
    d = _make_uncollected_ophandle(131)
    d.addCallback(lambda ign:
                  self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
    d.addCallback(lambda ign:
                  self.GET("/operations/131?t=status&output=JSON"))
    # _check1 body ('def _check1(res):' header elided in this extract)
    data = simplejson.loads(res)
    self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # Create an ophandle, don't collect it, then try to collect it
    # after 4 days. It should be gone.
    d.addCallback(lambda ign:
                  _make_uncollected_ophandle(132))
    d.addCallback(lambda ign:
                  self.clock.advance(96*60*60))
    d.addCallback(lambda ign:
                  self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
                                       404, "404 Not Found",
                                       "unknown/expired handle '132'",
                                       # NOTE(review): 'self.GET,' line appears elided here
                                       "/operations/132?t=status&output=JSON"))
def test_collected_ophandle_expiration(self):
    # collected ophandles should expire after 1 day
    def _make_collected_ophandle(ophandle):
        d = self.POST(self.public_url +
                      "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                      followRedirect=True)
        # By following the initial redirect, we collect the ophandle
        # we've just created.
        # NOTE(review): 'return d' appears elided in this extract
    # Create a collected ophandle, then collect it after 23 hours
    # and 59 seconds to make sure that it is still there.
    d = _make_collected_ophandle(133)
    d.addCallback(lambda ign:
                  self.clock.advance((24*60*60) - 1))
    d.addCallback(lambda ign:
                  self.GET("/operations/133?t=status&output=JSON"))
    # _check1 body ('def _check1(res):' header elided in this extract)
    data = simplejson.loads(res)
    self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # Create another uncollected ophandle, then try to collect it
    # after 24 hours to make sure that it is gone.
    d.addCallback(lambda ign:
                  _make_collected_ophandle(134))
    d.addCallback(lambda ign:
                  self.clock.advance(24*60*60))
    d.addCallback(lambda ign:
                  self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
                                       404, "404 Not Found",
                                       "unknown/expired handle '134'",
                                       # NOTE(review): 'self.GET,' line appears elided here
                                       "/operations/134?t=status&output=JSON"))
def test_incident(self):
    """POST /report_incident logs an incident and thanks the reporter."""
    d = self.POST("/report_incident", details="eek")
    # _done body (header elided in this extract)
    self.failUnless("Thank you for your report!" in res, res)
    d.addCallback(_done)
def test_static(self):
    """Files placed under staticdir are served beneath /static/."""
    webdir = os.path.join(self.staticdir, "subdir")
    fileutil.make_dirs(webdir)
    f = open(os.path.join(webdir, "hello.txt"), "wb")
    # NOTE(review): the f.write("hello")/f.close() lines appear elided in this extract
    d = self.GET("/static/subdir/hello.txt")
    # _check body (header elided)
    self.failUnlessEqual(res, "hello")
    d.addCallback(_check)
class Util(unittest.TestCase, ShouldFailMixin):
    """Unit tests for the small formatting helpers in allmydata.web.common."""
    def test_parse_replace_arg(self):
        """parse_replace_arg maps true/false/only-files; rejects anything else."""
        self.failUnlessEqual(common.parse_replace_arg("true"), True)
        self.failUnlessEqual(common.parse_replace_arg("false"), False)
        self.failUnlessEqual(common.parse_replace_arg("only-files"),
        # NOTE(review): the expected-value continuation line is elided in this extract
        # a misspelled value must raise AssertionError
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")
def test_abbreviate_time(self):
    """abbreviate_time renders None as "" and scales into s/ms/us units."""
    expectations = [(None, ""),
                    (1.234, "1.23s"),
                    (0.123, "123ms"),
                    (0.00123, "1.2ms"),
                    (0.000123, "123us")]
    for value, expected in expectations:
        self.failUnlessEqual(common.abbreviate_time(value), expected)
def test_abbreviate_rate(self):
    """abbreviate_rate renders None as "" and scales into MB/kB/B per second."""
    expectations = [(None, ""),
                    (1234000, "1.23MBps"),
                    (12340, "12.3kBps"),
                    (123, "123Bps")]
    for value, expected in expectations:
        self.failUnlessEqual(common.abbreviate_rate(value), expected)
def test_abbreviate_size(self):
    """abbreviate_size renders None as "" and scales into GB/MB/kB/B."""
    expectations = [(None, ""),
                    (1.23*1000*1000*1000, "1.23GB"),
                    (1.23*1000*1000, "1.23MB"),
                    (1230, "1.2kB"),
                    (123, "123B")]
    for value, expected in expectations:
        self.failUnlessEqual(common.abbreviate_size(value), expected)
def test_plural(self):
    """status.plural returns "s" for counts != 1 and for non-1-element lists."""
    # convert body; its 'def convert(s):' header is elided in this extract
    return "%d second%s" % (s, status.plural(s))
    self.failUnlessEqual(convert(0), "0 seconds")
    self.failUnlessEqual(convert(1), "1 second")
    self.failUnlessEqual(convert(2), "2 seconds")
    # convert2 body; its 'def convert2(s):' header is elided in this extract
    return "has share%s: %s" % (status.plural(s), ",".join(s))
    self.failUnlessEqual(convert2([]), "has shares: ")
    self.failUnlessEqual(convert2(["1"]), "has share: 1")
    self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
3043 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """POST to the stashed fileurl for 'which' with the given query string.

    'ign' is the ignored previous Deferred result, so this can be chained
    directly with addCallback.
    """
    target = self.fileurls[which] + "?" + args
    return self.GET(target, method="POST", clientnum=clientnum)
def test_filecheck(self):
    """Exercise t=check (html and json) on healthy, sick, dead, corrupt,
    literal, and small-directory files.
    NOTE(review): this extract elides some lines (grid setup, DATA,
    corrupt_share call, 'return d'); the code tokens below are unchanged."""
    self.basedir = "web/Grid/filecheck"
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data("literal", convergence="")))
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
    d.addCallback(_stash_mutable_uri, "smalldir")
    def _compute_fileurls(ignored):
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)
    def _clobber_shares(ignored):
        # leave "good" intact, delete one share of "sick", delete all but
        # one share of "dead", and corrupt one share of "corrupt"
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        # NOTE(review): the corrupt_share(cso) call appears elided here
    d.addCallback(_clobber_shares)
    d.addCallback(self.CHECK, "good", "t=check")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)
    d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
    def _got_html_good_return_to(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
    d.addCallback(_got_html_good_return_to)
    d.addCallback(self.CHECK, "good", "t=check&output=json")
    def _got_json_good(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"], "Healthy")
        self.failUnless(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_good)
    d.addCallback(self.CHECK, "small", "t=check")
    def _got_html_small(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_small)
    d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
    def _got_html_small_return_to(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
    d.addCallback(_got_html_small_return_to)
    d.addCallback(self.CHECK, "small", "t=check&output=json")
    def _got_json_small(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_small)
    d.addCallback(self.CHECK, "smalldir", "t=check")
    def _got_html_smalldir(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_smalldir)
    d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
    def _got_json_smalldir(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_smalldir)
    d.addCallback(self.CHECK, "sick", "t=check")
    def _got_html_sick(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_sick)
    d.addCallback(self.CHECK, "sick", "t=check&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_sick)
    d.addCallback(self.CHECK, "dead", "t=check")
    def _got_html_dead(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_dead)
    d.addCallback(self.CHECK, "dead", "t=check&output=json")
    def _got_json_dead(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 1 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failIf(r["results"]["recoverable"])
    d.addCallback(_got_json_dead)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
    def _got_html_corrupt(res):
        self.failUnless("Not Healthy! : Unhealthy" in res, res)
    d.addCallback(_got_html_corrupt)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
    def _got_json_corrupt(res):
        r = simplejson.loads(res)
        self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
        self.failIf(r["results"]["healthy"])
        self.failUnless(r["results"]["recoverable"])
        self.failUnlessEqual(r["results"]["count-shares-good"], 9)
        self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
    d.addCallback(_got_json_corrupt)
    d.addErrback(self.explain_web_error)
def test_repair_html(self):
    """Exercise t=check&repair=true (html output) on healthy, sick and
    corrupt files.
    NOTE(review): this extract elides some lines (grid setup, DATA,
    corrupt_share call, 'return d'); the code tokens below are unchanged."""
    self.basedir = "web/Grid/repair_html"
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")
    def _compute_fileurls(ignored):
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)
    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        # NOTE(review): the corrupt_share(cso) call appears elided here
    d.addCallback(_clobber_shares)
    d.addCallback(self.CHECK, "good", "t=check&repair=true")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("No repair necessary" in res, res)
    d.addCallback(_got_html_good)
    d.addCallback(self.CHECK, "sick", "t=check&repair=true")
    def _got_html_sick(res):
        self.failUnless("Healthy : healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_sick)
    # repair of a dead file will fail, of course, but it isn't yet
    # clear how this should be reported. Right now it shows up as
    #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
    #def _got_html_dead(res):
    #    self.failUnless("Healthy : healthy" in res, res)
    #    self.failIf("Not Healthy" in res, res)
    #    self.failUnless("No repair necessary" in res, res)
    #d.addCallback(_got_html_dead)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
    def _got_html_corrupt(res):
        self.failUnless("Healthy : Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_corrupt)
    d.addErrback(self.explain_web_error)
def test_repair_json(self):
    """t=check&repair=true&output=json reports pre- and post-repair results.
    NOTE(review): this extract elides some lines (grid setup, DATA,
    'return d'); the code tokens below are unchanged."""
    self.basedir = "web/Grid/repair_json"
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA+"1", convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "sick")
    def _compute_fileurls(ignored):
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)
    def _clobber_shares(ignored):
        # delete one share so the file is repairable-but-unhealthy
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
    d.addCallback(_clobber_shares)
    d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["repair-attempted"], True)
        self.failUnlessEqual(r["repair-successful"], True)
        self.failUnlessEqual(r["pre-repair-results"]["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["pre-repair-results"]["results"]["healthy"])
        self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
        self.failUnless(r["post-repair-results"]["results"]["healthy"])
    d.addCallback(_got_json_sick)
    d.addErrback(self.explain_web_error)
def test_unknown(self, immutable=False):
    """Unknown (future-format) caps must be tolerated by directory listings,
    t=json, and t=info rendering, in both mutable and deep-immutable forms.
    NOTE(review): this extract elides several lines, including the
    'if immutable:'/'else:' branch headers that the duplicated
    assignments below belong to; the code tokens are unchanged."""
    self.basedir = "web/Grid/unknown"
    self.basedir = "web/Grid/unknown-immutable"
    c0 = self.g.clients[0]
    future_write_uri = "x-tahoe-crazy://I_am_from_the_future."
    future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future."
    # the future cap format may contain slashes, which must be tolerated
    expected_info_url = "uri/%s?t=info" % urllib.quote(future_write_uri,
    name = u"future-imm"
    future_node = UnknownNode(None, future_read_uri, deep_immutable=True)
    d = c0.create_immutable_dirnode({name: (future_node, {})})
    future_node = UnknownNode(future_write_uri, future_read_uri)
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
        return self.rootnode.set_node(name, future_node)
    d.addCallback(_stash_root_and_create_file)
    # make sure directory listing tolerates unknown nodes
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_directory_html(res, expected_type_suffix):
        pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
                  '<td>%s</td>' % (expected_type_suffix, str(name)),
        self.failUnless(re.search(pattern, res), res)
        # find the More Info link for name, should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnlessEqual(info_url, "%s?t=info" % (str(name),))
    d.addCallback(_check_directory_html, "-IMM")
    d.addCallback(_check_directory_html, "")
    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_directory_json(res, expect_rw_uri):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        f = data[1]["children"][name]
        self.failUnlessEqual(f[0], "unknown")
        self.failUnlessEqual(f[1]["rw_uri"], future_write_uri)
        self.failIfIn("rw_uri", f[1])
        self.failUnlessEqual(f[1]["ro_uri"], "imm." + future_read_uri)
        self.failUnlessEqual(f[1]["ro_uri"], "ro." + future_read_uri)
        self.failUnless("metadata" in f[1])
    d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
    def _check_info(res, expect_rw_uri, expect_ro_uri):
        self.failUnlessIn("Object Type: <span>unknown</span>", res)
        self.failUnlessIn(future_write_uri, res)
        self.failUnlessIn(future_read_uri, res)
        self.failIfIn(future_read_uri, res)
        self.failIfIn("Raw data as", res)
        self.failIfIn("Directory writecap", res)
        self.failIfIn("Checker Operations", res)
        self.failIfIn("Mutable File Operations", res)
        self.failIfIn("Directory Operations", res)
    # FIXME: these should have expect_rw_uri=not immutable; I don't know
    # why they fail. Possibly related to ticket #922.
    d.addCallback(lambda ign: self.GET(expected_info_url))
    d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
    d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
    d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
    def _check_json(res, expect_rw_uri):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "unknown")
        self.failUnlessEqual(data[1]["rw_uri"], future_write_uri)
        self.failIfIn("rw_uri", data[1])
        self.failUnlessEqual(data[1]["ro_uri"], "imm." + future_read_uri)
        self.failUnlessEqual(data[1]["mutable"], False)
        self.failUnlessEqual(data[1]["ro_uri"], "ro." + future_read_uri)
        self.failUnlessEqual(data[1]["mutable"], True)
        self.failUnlessEqual(data[1]["ro_uri"], "ro." + future_read_uri)
        self.failIf("mutable" in data[1], data[1])
        # TODO: check metadata contents
        self.failUnless("metadata" in data[1])
    d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
    d.addCallback(_check_json, expect_rw_uri=not immutable)
    # and make sure that a read-only version of the directory can be
    # rendered too. This version will not have future_write_uri, whether
    # or not future_node was immutable.
    d.addCallback(lambda ign: self.GET(self.rourl))
    d.addCallback(_check_directory_html, "-IMM")
    d.addCallback(_check_directory_html, "-RO")
    d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
    d.addCallback(_check_directory_json, expect_rw_uri=False)
    d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
    d.addCallback(_check_json, expect_rw_uri=False)
    # TODO: check that getting t=info from the Info link in the ro directory
    # works, and does not include the writecap URI.
def test_immutable_unknown(self):
    """Exercise test_unknown with the deep-immutable unknown-cap variant."""
    d = self.test_unknown(immutable=True)
    return d
def test_mutant_dirnodes_are_omitted(self):
    """Mutant children (mutable caps in an immutable directory) are silently
    dropped from listings, while the valid LIT sibling survives.
    NOTE(review): this extract elides several lines (nm binding,
    '_created' header, loop/dict closers, 'return d'); the code
    tokens below are unchanged."""
    self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
    c = self.g.clients[0]
    lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
    mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
    mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
    # This method tests mainly dirnode, but we'd have to duplicate code in order to
    # test the dirnode and web layers separately.
    # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
    # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
    # When the directory is read, the mutants should be silently disposed of, leaving
    # their lonely sibling.
    # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
    # because immutable directories don't have a writecap and therefore that field
    # isn't (and can't be) decrypted.
    # TODO: The field still exists in the netstring. Technically we should check what
    # happens if something is put there (_unpack_contents should raise ValueError),
    # but that can wait.
    lonely_child = nm.create_from_cap(lonely_uri)
    mutant_ro_child = nm.create_from_cap(mut_read_uri)
    mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
    def _by_hook_or_by_crook():
    # force the mutants past create_immutable_dirnode's validity check
    for n in [mutant_ro_child, mutant_write_in_ro_child]:
        n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
    mutant_write_in_ro_child.get_write_uri = lambda: None
    mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
    kids = {u"lonely": (lonely_child, {}),
            u"ro": (mutant_ro_child, {}),
            u"write-in-ro": (mutant_write_in_ro_child, {}),
    d = c.create_immutable_dirnode(kids)
    # _created body ('def _created(dn):' header elided in this extract)
    self.failUnless(isinstance(dn, dirnode.DirectoryNode))
    self.failIf(dn.is_mutable())
    self.failUnless(dn.is_readonly())
    # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
    self.failIf(hasattr(dn._node, 'get_writekey'))
    self.failUnless("RO-IMM" in rep)
    self.failUnlessIn("CHK", cap.to_string())
    self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
    return download_to_data(dn._node)
    d.addCallback(_created)
    def _check_data(data):
        # Decode the netstring representation of the directory to check that all children
        # are present. This is a bit of an abstraction violation, but there's not really
        # any other way to do it given that the real DirectoryNode._unpack_contents would
        # strip the mutant children out (which is what we're trying to test, later).
        while position < len(data):
            entries, position = split_netstring(data, 1, position)
            (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
            name = name_utf8.decode("utf-8")
            self.failUnless(rwcapdata == "")
            self.failUnless(name in kids)
            (expected_child, ign) = kids[name]
            self.failUnlessEqual(ro_uri, expected_child.get_readonly_uri())
        self.failUnlessEqual(numkids, 3)
        return self.rootnode.list()
    d.addCallback(_check_data)
    # Now when we use the real directory listing code, the mutants should be absent.
    def _check_kids(children):
        self.failUnlessEqual(sorted(children.keys()), [u"lonely"])
        lonely_node, lonely_metadata = children[u"lonely"]
        self.failUnlessEqual(lonely_node.get_write_uri(), None)
        self.failUnlessEqual(lonely_node.get_readonly_uri(), lonely_uri)
    d.addCallback(_check_kids)
    d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
    d.addCallback(lambda n: n.list())
    d.addCallback(_check_kids) # again with dirnode recreated from cap
    # Make sure the lonely child can be listed in HTML...
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_html(res):
        self.failIfIn("URI:SSK", res)
        get_lonely = "".join([r'<td>FILE</td>',
                              r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
                              r'\s+<td>%d</td>' % len("one"),
        self.failUnless(re.search(get_lonely, res), res)
        # find the More Info link for name, should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
    d.addCallback(_check_html)
    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        listed_children = data[1]["children"]
        self.failUnlessEqual(sorted(listed_children.keys()), [u"lonely"])
        ll_type, ll_data = listed_children[u"lonely"]
        self.failUnlessEqual(ll_type, "filenode")
        self.failIf("rw_uri" in ll_data)
        self.failUnlessEqual(ll_data["ro_uri"], lonely_uri)
    d.addCallback(_check_json)
    def test_deep_check(self):
        """Exercise t=stream-deep-check and t=stream-manifest over a small
        tree (one healthy CHK file, one literal file, one unhealthy file,
        and an UnknownNode), then make a subdirectory unrecoverable and
        confirm the streaming APIs report the failure via an ERROR: line
        instead of a JSON unit."""
        self.basedir = "web/Grid/deep_check"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            # remember the webapi URL of the new root directory
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")

        # this tests that deep-check and stream-manifest will ignore
        # UnknownNode instances. Hopefully this will also cover deep-stats.
        future_write_uri = "x-tahoe-crazy://I_am_from_the_future."
        future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future."
        future_node = UnknownNode(future_write_uri, future_read_uri)
        d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))

        def _clobber_shares(ignored):
            # delete two shares of "sick" so the checker reports it
            # unhealthy (but it stays recoverable)
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
            # each line of the streamed response is one JSON unit
            units = [simplejson.loads(line)
                     for line in res.splitlines()
                print "response is:", res
                print "undecodeable line was '%s'" % line
            # root, good, small, sick, future, plus one trailing stats unit
            self.failUnlessEqual(len(units), 5+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)

            self.failUnlessEqual(stats["type"], "stats")
            # deep-stats must count the UnknownNode without descending it
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
            self.failUnlessEqual(s["count-unknown"], 1)
        d.addCallback(_done)

        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_manifest(res):
            # stream-manifest emits newline-terminated JSON units too
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 5+1)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessEqual(stats["count-immutable-files"], 2)
            self.failUnlessEqual(stats["count-literal-files"], 1)
            self.failUnlessEqual(stats["count-mutable-files"], 0)
            self.failUnlessEqual(stats["count-immutable-files"], 2)
            self.failUnlessEqual(stats["count-unknown"], 1)
        d.addCallback(_check_manifest)

        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens

        d.addCallback(lambda ign:
                      self.rootnode.create_subdirectory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+"2",
        d.addCallback(_stash_uri, "grandchild")

        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],

        # root/subdir [unrecoverable]
        # root/subdir/grandchild

        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string

        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            lines = res.splitlines()
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            # everything after the ERROR: line is the (multi-line) traceback
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            # only the units before the ERROR: line are valid JSON
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            # same ERROR:-line protocol as stream-manifest above
            lines = res.splitlines()
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            # subdir lost 9 of its 10 shares, so it is not recoverable
            self.failUnlessEqual(r["count-recoverable-versions"], 0)
            self.failUnlessEqual(r["count-shares-good"], 1)
            self.failUnlessEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)

        d.addErrback(self.explain_web_error)
    def test_deep_check_and_repair(self):
        """Run t=stream-deep-check&repair=true over a tree containing a
        healthy file, a literal file, and a file missing one share, and
        verify that only the sick file is (successfully) repaired."""
        self.basedir = "web/Grid/deep_check_and_repair"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        #d.addCallback(lambda ign:
        #              self.rootnode.add_file(u"dead",
        #                                     upload.Data(DATA+"2",
        #d.addCallback(_stash_uri, "dead")

        #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
        #d.addCallback(_stash_uri, "corrupt")

        def _clobber_shares(ignored):
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            # delete exactly one share of "sick": still recoverable, but
            # the checker should flag it and the repairer should restore it
            os.unlink(sick_shares[0][2])
            #dead_shares = self.find_shares(self.uris["dead"])
            #for i in range(1, 10):
            #    os.unlink(dead_shares[i][2])

            #c_shares = self.find_shares(self.uris["corrupt"])
            #cso = CorruptShareOptions()
            #cso.stdout = StringIO()
            #cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)

        # root/good CHK, 10 shares
        # root/sick CHK, 9 shares

        d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
            # one JSON unit per line of the streamed response
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # root, good, small, sick, plus one trailing stats unit
            self.failUnlessEqual(len(units), 4+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0crr = u0["check-and-repair-results"]
            # healthy objects must not be repaired
            self.failUnlessEqual(u0crr["repair-attempted"], False)
            self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcrr = ugood["check-and-repair-results"]
            self.failUnlessEqual(ugoodcrr["repair-attempted"], False)
            self.failUnlessEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)

            usick = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"sick"]][0]
            self.failUnlessEqual(usick["cap"], self.uris["sick"])
            usickcrr = usick["check-and-repair-results"]
            # the sick file goes from 9 shares back up to 10
            self.failUnlessEqual(usickcrr["repair-attempted"], True)
            self.failUnlessEqual(usickcrr["repair-successful"], True)
            self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
            self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)

            self.failUnlessEqual(stats["type"], "stats")
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
        d.addCallback(_done)

        d.addErrback(self.explain_web_error)
    def _count_leases(self, ignored, which):
        """Collect (sharefile-path, lease-count) pairs for every share of
        self.uris[which].

        'ignored' exists so this helper can sit directly in a Deferred
        callback chain. The pairs are consumed by _assert_leasecount.
        NOTE(review): the initialization of lease_counts and the return
        statement appear to be elided from this excerpt — confirm against
        the full file.
        """
        u = self.uris[which]
        shares = self.find_shares(u)
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            # count the leases recorded inside the share file itself
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
3856 def _assert_leasecount(self, lease_counts, expected):
3857 for (fn, num_leases) in lease_counts:
3858 if num_leases != expected:
3859 self.fail("expected %d leases, have %d, on %s" %
3860 (expected, num_leases, fn))
    def test_add_lease(self):
        """Verify t=check&add-lease=true semantics: a check from the
        original client merely renews its existing lease (count stays 1),
        while a check from a second client (different lease secrets) adds
        a second lease."""
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
        d.addCallback(_stash_mutable_uri, "mutable")

        def _compute_fileurls(ignored):
            # build the webapi URL for each stashed cap
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        # each object starts out with exactly one lease (from its upload)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)

        # a plain check (no add-lease) must not change any lease counts
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        # same-client add-lease on the mutable file: counts unchanged
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
        d.addCallback(_got_html_good)

        # alternate-client add-lease gives the mutable file a second lease
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
    def test_deep_add_lease(self):
        """Verify t=stream-deep-check&add-lease=true over a whole tree:
        with the original client the existing leases are merely renewed
        (counts stay 1); with a second client every node in the tree gains
        a second lease."""
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"one", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")

        d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")

        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # root, one, small, mutable, stats
            self.failUnlessEqual(len(units), 4+1)
        d.addCallback(_done)

        # baseline: one lease everywhere (from the original uploads)
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # same client: add-lease only renews, so counts stay at 1
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # second client (different lease secrets): every node gains a lease
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
    def test_exceptions(self):
        """Exercise the webapi's error reporting: NoSharesError and
        NotEnoughSharesError as text/plain 410s, missing children as 404s,
        unrecoverable dirnodes in both HTML and JSON form, and internal
        server errors rendered as HTML or plain-text tracebacks depending
        on the Accept header."""
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            # a child name that was never linked into the directory
            self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
            self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
            # keep only share 0: the file is findable but unrecoverable
            self.delete_shares_numbered(ur.uri, range(1,10))
            u = uri.from_string(ur.uri)
            # flip one bit of the key to fabricate a URI with zero shares
            u.key = testutil.flip_bit(u.key, 0)
            baduri = u.to_string()
            self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_1share(n):
            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-1share-json"] = url + "?t=json"
            # leave just one share of the dirnode: unrecoverable
            self.delete_shares_numbered(u, range(1,10))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_0share(n):
            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-0share-json"] = url + "?t=json"
            # delete every share of this dirnode
            self.delete_shares_numbered(u, range(0,10))
        d.addCallback(_mangle_dirnode_0share)

        # NotEnoughSharesError should be reported sensibly, with a
        # text/plain explanation of the problem, and perhaps some
        # information on which shares *could* be found.

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET unrecoverable",
                                           410, "Gone", "NoSharesError",
                                           self.GET, self.fileurls["0shares"]))
        def _check_zero_shares(body):
            self.failIf("<html>" in body, body)
            # collapse whitespace so the comparison ignores line wrapping
            body = " ".join(body.strip().split())
            exp = ("NoSharesError: no shares could be found. "
                   "Zero shares usually indicates a corrupt URI, or that "
                   "no servers were connected, but it might also indicate "
                   "severe corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is: "
                   "Failed to get enough shareholders: have 0, need 3")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_zero_shares)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET 1share",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["1share"]))
        def _check_one_share(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NotEnoughSharesError: This indicates that some "
                   "servers were unavailable, or that shares have been "
                   "lost to server departure, hard drive failure, or disk "
                   "corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is:"
                   " Failed to get enough shareholders: have 1, need 3")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_one_share)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        def _missing_child(body):
            self.failUnless("No such child: imaginary" in body, body)
        d.addCallback(_missing_child)

        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            self.failUnless("<html>" in body, body)
            # we should see the regular page, but without the child table or
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_0shares_dir_html)

        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.failUnless("<html>" in body, body)
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_1shares_dir_html)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            # the JSON form must be plain text, not an HTML page
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))

        # attach a webapi child that throws a random error, to test how it
        w = c0.getServiceNamed("webish")
        w.root.putChild("ERRORBOOM", ErrorBoom())

        # "Accept: */*" : should get a text/html stack trace
        # "Accept: text/plain" : should get a text/plain stack trace
        # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
        # no Accept header: should get a text/html stack trace

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["*/*"]}))
        def _internal_error_html1(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html1)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["text/plain"]}))
        def _internal_error_text2(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text2)

        CLI_accepts = "text/plain, application/octet-stream"
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": [CLI_accepts]}))
        def _internal_error_text3(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text3)

        # no Accept header at all: default to the HTML rendering
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html4(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html4)

        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            self.flushLoggedErrors(CompletelyUnhandledError)
        d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Marker exception deliberately raised by ErrorBoom.

    test_exceptions uses it to observe how the webapi renders an
    unexpected internal error, and flushes it from trial's logs afterward.
    """
class ErrorBoom(rend.Page):
    """A webapi resource that always explodes during rendering.

    test_exceptions mounts an instance at /ERRORBOOM to verify how the
    server reports an unhandled internal error (HTML or text/plain stack
    trace, depending on the request's Accept header).
    """
    def beforeRender(self, ctx):
        # raise before any rendering happens; the tests care about the
        # exception type, not the message text
        raise CompletelyUnhandledError("whoops")