1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.internet.task import Clock
8 from twisted.web import client, error, http
9 from twisted.python import failure, log
10 from nevow import rend
11 from allmydata import interfaces, uri, webish, dirnode
12 from allmydata.storage.shares import get_share_file
13 from allmydata.storage_client import StorageFarmBroker
14 from allmydata.immutable import upload, download
15 from allmydata.dirnode import DirectoryNode
16 from allmydata.nodemaker import NodeMaker
17 from allmydata.unknown import UnknownNode
18 from allmydata.web import status, common
19 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
20 from allmydata.util import fileutil, base32
21 from allmydata.util.consumer import download_to_data
22 from allmydata.util.netstring import split_netstring
23 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
24 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
25 from allmydata.interfaces import IMutableFileNode
26 from allmydata.mutable import servermap, publish, retrieve
27 import allmydata.test.common_util as testutil
28 from allmydata.test.no_network import GridTestMixin
29 from allmydata.test.common_web import HTTPClientGETFactory, \
31 from allmydata.client import Client, SecretHolder
# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.

# Caps that claim to come from a future version of the cap format; used to
# exercise the webapi's handling of unknown/unrecognized cap types.
unknown_rwcap = "lafs://from_the_future"
unknown_rocap = "ro.lafs://readonly_from_the_future"
unknown_immcap = "imm.lafs://immutable_from_the_future"
class FakeStatsProvider:
    """Minimal stats-provider stand-in: always reports empty stats."""
    def get_stats(self):
        # The excerpt shows the stats dict floating directly in the class
        # body (the `def get_stats(self):` line was elided); the status/
        # welcome pages call get_stats(), so restore the accessor.
        stats = {'stats': {}, 'counters': {}}
        return stats
class FakeNodeMaker(NodeMaker):
    """NodeMaker variant that fabricates in-memory fake filenodes."""
    def _create_lit(self, cap):
        # LIT caps are served by the same fake node type as CHK caps
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        node = FakeMutableFileNode(None, None, None, None)
        return node.create(contents)
class FakeUploader(service.Service):
    """Uploader stand-in that turns uploadables into fake CHK filenodes."""
    def upload(self, uploadable, history=None):
        """Read all of *uploadable* and return UploadResults via a Deferred."""
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # The `def _got_data` header and the trailing returns were elided in
        # the excerpt (it was referenced but never defined); reconstructed here.
        def _got_data(datav):
            # uploadable.read() fires with a list of data strings
            data = "".join(datav)
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
            return results
        d.addCallback(_got_data)
        return d
    def get_helper_info(self):
        # no upload helper is configured in these tests
        return (None, False)
# NOTE(review): the enclosing `class FakeHistory:` header lies outside this
# excerpt; the following are its class attributes and methods.
# One pre-made status object of each kind, so the status web pages always
# have at least one entry (entry #0) for the tests to reference.
_all_upload_status = [upload.UploadStatus()]
_all_download_status = [download.DownloadStatus()]
_all_mapupdate_statuses = [servermap.UpdateStatus()]
_all_publish_statuses = [publish.PublishStatus()]
_all_retrieve_statuses = [retrieve.RetrieveStatus()]

# Accessors mirroring the real History API used by the /status pages.
def list_all_upload_statuses(self):
    return self._all_upload_status
def list_all_download_statuses(self):
    return self._all_download_status
def list_all_mapupdate_statuses(self):
    return self._all_mapupdate_statuses
def list_all_publish_statuses(self):
    return self._all_publish_statuses
def list_all_retrieve_statuses(self):
    return self._all_retrieve_statuses
91 def list_all_helper_statuses(self):
class FakeClient(Client):
    """Client stand-in wired together from the Fake* services above."""
    def __init__(self):
        # don't upcall to Client.__init__, since we only want to initialize a
        # minimal subset of the real client's state
        service.MultiService.__init__(self)
        self.nodeid = "fake_nodeid"
        self.nickname = "fake_nickname"
        self.introducer_furl = "None"
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = SecretHolder("lease secret", "convergence secret")
        # NOTE(review): one attribute line between the secret holder and the
        # convergence string was elided; `self.helper = None` is the likely
        # candidate -- verify against the original file.
        self.helper = None
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(None, permute_peers=True)
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.setServiceParent(self)
        # NOTE(review): the trailing NodeMaker arguments were elided and are
        # reconstructed here -- confirm against NodeMaker.__init__'s signature.
        self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
                                       self.uploader, None, None,
                                       None, None)

    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
120 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
class WebMixin(object):
    """Shared fixture: a webish server wrapped around a FakeClient, plus a
    small tree of fake directories and files, and HTTP helper methods.

    NOTE(review): the `def setUp(self):` header and several callback/def
    lines are elided in this excerpt; the statements below are the
    surviving body of setUp().
    """
    # start the fake client and a web server on an ephemeral port
    self.s = FakeClient()
    self.s.startService()
    self.staticdir = self.mktemp()
    # NOTE(review): the closing arguments of this call are elided
    self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
    self.ws.setServiceParent(self.s)
    self.webish_port = port = self.ws.listener._port.getHost().port
    self.webish_url = "http://localhost:%d" % port

    # create six fake dirnodes; `res` below is the DeferredList result
    # (the enclosing callback's def line is elided)
    l = [ self.s.create_dirnode() for x in range(6) ]
    d = defer.DeferredList(l)
    self.public_root = res[0][1]
    assert interfaces.IDirectoryNode.providedBy(self.public_root), res
    self.public_url = "/uri/" + self.public_root.get_uri()
    self.private_root = res[1][1]

    # `foo` is the third dirnode (its assignment line is elided)
    self._foo_uri = foo.get_uri()
    self._foo_readonly_uri = foo.get_readonly_uri()
    self._foo_verifycap = foo.get_verify_cap().to_string()
    # NOTE: we ignore the deferred on all set_uri() calls, because we
    # know the fake nodes do these synchronously
    self.public_root.set_uri(u"foo", foo.get_uri(),
                             foo.get_readonly_uri())

    self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
    foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
    self._bar_txt_verifycap = n.get_verify_cap().to_string()

    foo.set_uri(u"empty", res[3][1].get_uri(),
                res[3][1].get_readonly_uri())
    sub_uri = res[4][1].get_uri()
    self._sub_uri = sub_uri
    foo.set_uri(u"sub", sub_uri, sub_uri)
    sub = self.s.create_node_from_uri(sub_uri)

    _ign, n, blocking_uri = self.makefile(1)
    foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)

    unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
    # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
    # still think of it as an umlaut
    foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)

    _ign, n, baz_file = self.makefile(2)
    self._baz_file_uri = baz_file
    sub.set_uri(u"baz.txt", baz_file, baz_file)

    _ign, n, self._bad_file_uri = self.makefile(3)
    # this uri should not be downloadable
    del FakeCHKFileNode.all_contents[self._bad_file_uri]

    # `rodir` is a read-only dirnode (its creation lines are elided)
    self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                             rodir.get_readonly_uri())
    rodir.set_uri(u"nor", baz_file, baz_file)

    # resulting tree (partially elided in this excerpt):
    # public/foo/blockingfile
    # public/foo/sub/baz.txt
    # public/reedownlee/nor
    self.NEWFILE_CONTENTS = "newfile contents\n"

    # stash bar.txt's link metadata for later comparisons
    return foo.get_metadata_for(u"bar.txt")
    def _got_metadata(metadata):
        self._bar_txt_metadata = metadata
    d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a fake CHK file whose contents embed *number*.

    Returns a (contents, filenode, uri) tuple.
    """
    body = "contents of file %s\n" % number
    node = create_chk_filenode(body)
    return (body, node, node.get_uri())
# NOTE(review): the `def tearDown(self):` header appears to be elided; this
# return shuts down the fake client and its child services.
return self.s.stopService()

def failUnlessIsBarDotTxt(self, res):
    # assert that the response body is exactly bar.txt's contents
    self.failUnlessEqual(res, self.BAR_CONTENTS, res)
def failUnlessIsBarJSON(self, res):
    """Assert that *res* is the t=json rendering of the bar.txt filenode."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], u"filenode")
    self.failUnless(isinstance(parsed[1], dict))
    info = parsed[1]
    self.failIf(info["mutable"])
    self.failIf("rw_uri" in info) # immutable
    self.failUnlessEqual(info["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(info["verify_uri"], self._bar_txt_verifycap)
    self.failUnlessEqual(info["size"], len(self.BAR_CONTENTS))
def failUnlessIsFooJSON(self, res):
    """Assert that *res* is the t=json rendering of the 'foo' directory."""
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    self.failUnlessEqual(data[0], "dirnode", res)
    self.failUnless(isinstance(data[1], dict))
    self.failUnless(data[1]["mutable"])
    self.failUnless("rw_uri" in data[1]) # mutable
    self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
    self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
    self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)

    kidnames = sorted([unicode(n) for n in data[1]["children"]])
    self.failUnlessEqual(kidnames,
                         [u"bar.txt", u"blockingfile", u"empty",
                          u"n\u00fc.txt", u"sub"])
    # NOTE(review): the `for (name, value)` line of this dict-building
    # expression is elided in the excerpt
    kids = dict( [(unicode(name),value)
                  in data[1]["children"].iteritems()] )
    # each child entry is a [type, info] pair; check sub/ and bar.txt
    self.failUnlessEqual(kids[u"sub"][0], "dirnode")
    self.failUnlessIn("metadata", kids[u"sub"][1])
    self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
    tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
    self.failUnlessIn("linkcrtime", tahoe_md)
    self.failUnlessIn("linkmotime", tahoe_md)
    self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
    self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
    self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                         self._bar_txt_verifycap)
    self.failUnlessIn("metadata", kids[u"bar.txt"][1])
    self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
    self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
                         self._bar_txt_metadata["tahoe"]["linkcrtime"])
    # NOTE(review): the right-hand side of this comparison is elided
    self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
# NOTE(review): in both helpers below, the `**kwargs):` signature tail, the
# `d = factory.deferred` binding and the `def _got_data(data):` headers are
# elided in this excerpt.
def GET(self, urlpath, followRedirect=False, return_response=False,
    # if return_response=True, this fires with (data, statuscode,
    # respheaders) instead of just data.
    assert not isinstance(urlpath, unicode)
    url = self.webish_url + urlpath
    factory = HTTPClientGETFactory(url, method="GET",
                                   followRedirect=followRedirect, **kwargs)
    reactor.connectTCP("localhost", self.webish_port, factory)
    return (data, factory.status, factory.response_headers)
    d.addCallback(_got_data)
    return factory.deferred

def HEAD(self, urlpath, return_response=False, **kwargs):
    # this requires some surgery, because twisted.web.client doesn't want
    # to give us back the response headers.
    factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
    reactor.connectTCP("localhost", self.webish_port, factory)
    return (data, factory.status, factory.response_headers)
    d.addCallback(_got_data)
    return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """Issue an HTTP PUT of *data* to *urlpath*; returns getPage's deferred."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="PUT", postdata=data, **kwargs)
def DELETE(self, urlpath):
    """Issue an HTTP DELETE to *urlpath*; returns getPage's deferred."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
def POST(self, urlpath, followRedirect=False, **fields):
    """POST *fields* as multipart/form-data; a tuple value means
    (filename, data) for a file-upload field.

    NOTE(review): the lines initializing `form`/`headers` and appending the
    boundary separators, plus the `else:` of the tuple check, are elided in
    this excerpt.
    """
    sepbase = "boogabooga"
    form.append('Content-Disposition: form-data; name="_charset"')
    for name, value in fields.iteritems():
        if isinstance(value, tuple):
            filename, value = value
            form.append('Content-Disposition: form-data; name="%s"; '
                        'filename="%s"' % (name, filename.encode("utf-8")))
            form.append('Content-Disposition: form-data; name="%s"' % name)
        if isinstance(value, unicode):
            value = value.encode("utf-8")
        # all field values must be bytes before being joined into the body
        assert isinstance(value, str)
    body = "\r\n".join(form) + "\r\n"
    headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
    return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """POST *body* to *urlpath* with *headers*; returns getPage's deferred.

    The original used a mutable default (`headers={}`): one shared dict
    reused across every call and handed to client.getPage, so any mutation
    would leak between requests.  Use the None-sentinel idiom instead;
    callers passing headers explicitly are unaffected.
    """
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
def shouldFail(self, res, expected_failure, which,
               substring=None, response_substring=None):
    """addBoth handler: assert *res* is a Failure of *expected_failure*.

    NOTE(review): the `if substring:` guard and the `else:` preceding the
    final self.fail() are elided in this excerpt.
    """
    if isinstance(res, failure.Failure):
        res.trap(expected_failure)
        self.failUnless(substring in str(res),
                        "substring '%s' not in '%s'"
                        % (substring, str(res)))
        if response_substring:
            self.failUnless(response_substring in res.value.response,
                            "response substring '%s' not in '%s'"
                            % (response_substring, res.value.response))
        self.fail("%s was supposed to raise %s, not get '%s'" %
                  (which, expected_failure, res))

def shouldFail2(self, expected_failure, which, substring,
                callable, *args, **kwargs):
    """Invoke *callable* and assert that it fails with *expected_failure*.

    NOTE(review): the `response_substring=None,` signature line, the
    `def _done(res):` callback header, guards/else branches, and the
    trailing addBoth/return are elided in this excerpt.
    """
    assert substring is None or isinstance(substring, str)
    assert response_substring is None or isinstance(response_substring, str)
    d = defer.maybeDeferred(callable, *args, **kwargs)
    if isinstance(res, failure.Failure):
        res.trap(expected_failure)
        self.failUnless(substring in str(res),
                        "%s: substring '%s' not in '%s'"
                        % (which, substring, str(res)))
        if response_substring:
            self.failUnless(response_substring in res.value.response,
                            "%s: response substring '%s' not in '%s'"
                            response_substring, res.value.response))
        self.fail("%s was supposed to raise %s, not get '%s'" %
                  (which, expected_failure, res))

def should404(self, res, which):
    # assert *res* is a web error with HTTP status 404
    # NOTE(review): the else branch and the fail()'s argument tuple are elided
    if isinstance(res, failure.Failure):
        res.trap(error.Error)
        self.failUnlessEqual(res.value.status, "404")
        self.fail("%s was supposed to Error(404), not get '%s'" %

def should302(self, res, which):
    # assert *res* is a web error with HTTP status 302 (redirect)
    # NOTE(review): the else branch and the fail()'s argument tuple are elided
    if isinstance(res, failure.Failure):
        res.trap(error.Error)
        self.failUnlessEqual(res.value.status, "302")
        self.fail("%s was supposed to Error(302), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    """Exercises the webapi against the fake client set up by WebMixin."""
    def test_create(self):
        # NOTE(review): body elided in this excerpt

    def test_welcome(self):
        # GET / and check the welcome page renders; the GET call, the
        # `def _check(res):` header, and the trailing return are elided.
        self.failUnless('Welcome To Tahoe-LAFS' in res, res)

        self.s.basedir = 'web/test_welcome'
        fileutil.make_dirs("web/test_welcome")
        fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
def test_provisioning(self):
    """Render /provisioning/ and POST three sets of form fields.

    NOTE(review): the `def _check*(res):` callback headers, several field
    lines, and the trailing return are elided in this excerpt.
    """
    d = self.GET("/provisioning/")
    self.failUnless('Provisioning Tool' in res)
    fields = {'filled': True,
              "num_users": int(50e3),
              "files_per_user": 1000,
              "space_per_user": int(1e9),
              "sharing_ratio": 1.0,
              "encoding_parameters": "3-of-10-5",
              "ownership_mode": "A",
              "download_rate": 100,
    return self.POST("/provisioning/", **fields)
    d.addCallback(_check)
    self.failUnless('Provisioning Tool' in res)
    self.failUnless("Share space consumed: 167.01TB" in res)

    # a much larger grid, expected to overflow the size formatter
    fields = {'filled': True,
              "num_users": int(50e6),
              "files_per_user": 1000,
              "space_per_user": int(5e9),
              "sharing_ratio": 1.0,
              "encoding_parameters": "25-of-100-50",
              "num_servers": 30000,
              "ownership_mode": "E",
              "drive_failure_model": "U",
              "download_rate": 1000,
    return self.POST("/provisioning/", **fields)
    d.addCallback(_check2)
    self.failUnless("Share space consumed: huge!" in res)
    # an almost-empty form should still render
    fields = {'filled': True}
    return self.POST("/provisioning/", **fields)
    d.addCallback(_check3)
    self.failUnless("Share space consumed:" in res)
    d.addCallback(_check4)

def test_reliability_tool(self):
    """Render /reliability/ and POST parameters; skipped without NumPy.

    NOTE(review): the try/except around the import, several field lines,
    and the callback headers are elided in this excerpt.
    """
    from allmydata import reliability
    _hush_pyflakes = reliability
    raise unittest.SkipTest("reliability tool requires NumPy")

    d = self.GET("/reliability/")
    self.failUnless('Reliability Tool' in res)
    fields = {'drive_lifetime': "8Y",
              "check_period": "1M",
              "report_period": "3M",
    return self.POST("/reliability/", **fields)
    d.addCallback(_check)
    self.failUnless('Reliability Tool' in res)
    r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
    self.failUnless(re.search(r, res), res)
    d.addCallback(_check2)
def test_status(self):
    """Check the /status pages for each of the pre-seeded status objects.

    NOTE(review): a few callback `def` headers and the trailing return are
    elided in this excerpt.
    """
    h = self.s.get_history()
    # counters of the single pre-made status object of each kind
    dl_num = h.list_all_download_statuses()[0].get_counter()
    ul_num = h.list_all_upload_statuses()[0].get_counter()
    mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
    pub_num = h.list_all_publish_statuses()[0].get_counter()
    ret_num = h.list_all_retrieve_statuses()[0].get_counter()
    d = self.GET("/status", followRedirect=True)
    self.failUnless('Upload and Download Status' in res, res)
    self.failUnless('"down-%d"' % dl_num in res, res)
    self.failUnless('"up-%d"' % ul_num in res, res)
    self.failUnless('"mapupdate-%d"' % mu_num in res, res)
    self.failUnless('"publish-%d"' % pub_num in res, res)
    self.failUnless('"retrieve-%d"' % ret_num in res, res)
    d.addCallback(_check)
    d.addCallback(lambda res: self.GET("/status/?t=json"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, dict))
        #active = data["active"]
        # TODO: test more. We need a way to fake an active operation
    d.addCallback(_check_json)

    d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
    self.failUnless("File Download Status" in res, res)
    d.addCallback(_check_dl)
    d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
    self.failUnless("File Upload Status" in res, res)
    d.addCallback(_check_ul)
    d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
    def _check_mapupdate(res):
        self.failUnless("Mutable File Servermap Update Status" in res, res)
    d.addCallback(_check_mapupdate)
    d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
    def _check_publish(res):
        self.failUnless("Mutable File Publish Status" in res, res)
    d.addCallback(_check_publish)
    d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
    def _check_retrieve(res):
        self.failUnless("Mutable File Retrieve Status" in res, res)
    d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """Spot-check render_time/render_rate formatting on both renderer mixins."""
    time_cases = [(None, ""), (2.5, "2.50s"), (0.25, "250ms"),
                  (0.0021, "2.1ms"), (0.000123, "123us")]
    rate_cases = [(None, ""), (2500000, "2.50MBps"),
                  (30100, "30.1kBps"), (123, "123Bps")]

    renderer = status.DownloadResultsRendererMixin()
    for value, expected in time_cases:
        self.failUnlessEqual(renderer.render_time(None, value), expected)
    for value, expected in rate_cases:
        self.failUnlessEqual(renderer.render_rate(None, value), expected)

    renderer = status.UploadResultsRendererMixin()
    for value, expected in time_cases:
        self.failUnlessEqual(renderer.render_time(None, value), expected)
    for value, expected in rate_cases:
        self.failUnlessEqual(renderer.render_rate(None, value), expected)
# NOTE(review): throughout the following Range-header tests, the trailing
# `d.addCallback(_got)` / `return d` lines (and some shouldFail2 argument
# tails) are elided in this excerpt.
def test_GET_FILEURL(self):
    # plain GET of a file returns its full contents
    d = self.GET(self.public_url + "/foo/bar.txt")
    d.addCallback(self.failUnlessIsBarDotTxt)

def test_GET_FILEURL_range(self):
    # bounded Range: expect 206 Partial Content plus a Content-Range header
    headers = {"range": "bytes=1-10"}
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessEqual(headers["content-range"][0],
                             "bytes 1-10/%d" % len(self.BAR_CONTENTS))
        self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])

def test_GET_FILEURL_partial_range(self):
    # open-ended range "bytes=5-" returns from offset 5 to end of file
    headers = {"range": "bytes=5-"}
    length = len(self.BAR_CONTENTS)
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessEqual(headers["content-range"][0],
                             "bytes 5-%d/%d" % (length-1, length))
        self.failUnlessEqual(res, self.BAR_CONTENTS[5:])

def test_GET_FILEURL_partial_end_range(self):
    # suffix range "bytes=-5" returns the last five bytes
    headers = {"range": "bytes=-5"}
    length = len(self.BAR_CONTENTS)
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessEqual(headers["content-range"][0],
                             "bytes %d-%d/%d" % (length-5, length-1, length))
        self.failUnlessEqual(res, self.BAR_CONTENTS[-5:])

def test_GET_FILEURL_partial_range_overrun(self):
    # a range starting past EOF must yield 416
    headers = {"range": "bytes=100-200"}
    d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
                         "416 Requested Range not satisfiable",
                         "First beyond end of file",
                         self.GET, self.public_url + "/foo/bar.txt",

def test_HEAD_FILEURL_range(self):
    # HEAD with a range: same headers as GET, but an empty body
    headers = {"range": "bytes=1-10"}
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(res, "")
        self.failUnlessEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessEqual(headers["content-range"][0],
                             "bytes 1-10/%d" % len(self.BAR_CONTENTS))

def test_HEAD_FILEURL_partial_range(self):
    headers = {"range": "bytes=5-"}
    length = len(self.BAR_CONTENTS)
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessEqual(headers["content-range"][0],
                             "bytes 5-%d/%d" % (length-1, length))

def test_HEAD_FILEURL_partial_end_range(self):
    headers = {"range": "bytes=-5"}
    length = len(self.BAR_CONTENTS)
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessEqual(headers["content-range"][0],
                             "bytes %d-%d/%d" % (length-5, length-1, length))

def test_HEAD_FILEURL_partial_range_overrun(self):
    headers = {"range": "bytes=100-200"}
    d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
                         "416 Requested Range not satisfiable",
                         self.HEAD, self.public_url + "/foo/bar.txt",

def test_GET_FILEURL_range_bad(self):
    # a malformed Range header is ignored: full body, 200, no Content-Range
    headers = {"range": "BOGUS=fizbop-quarnak"}
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(int(status), 200)
        self.failUnless(not headers.has_key("content-range"))
        self.failUnlessEqual(res, self.BAR_CONTENTS)
def test_HEAD_FILEURL(self):
    # HEAD without a range: empty body, correct length/type headers
    d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
    def _got((res, status, headers)):
        self.failUnlessEqual(res, "")
        self.failUnlessEqual(headers["content-length"][0],
                             str(len(self.BAR_CONTENTS)))
        self.failUnlessEqual(headers["content-type"], ["text/plain"])

def test_GET_FILEURL_named(self):
    """Fetch a file through /file/ and /named/ with assorted name suffixes.

    NOTE(review): the trailing lines of this test are elided in this excerpt.
    """
    base = "/file/%s" % urllib.quote(self._bar_txt_uri)
    base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
    d = self.GET(base + "/@@name=/blah.txt")
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base + "/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    save_url = base + "?save=true&filename=blah.txt"
    d.addCallback(lambda res: self.GET(save_url))
    d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    u_fn_e = urllib.quote(u_filename.encode("utf-8"))
    u_url = base + "?save=true&filename=" + u_fn_e
    d.addCallback(lambda res: self.GET(u_url))
    d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
# NOTE(review): in several tests below, middle shouldFail2 arguments and the
# trailing `self.GET/self.PUT, ...)` / `return d` lines are elided.
def test_PUT_FILEURL_named_bad(self):
    # PUT to a /file/ URL is rejected; only GET/HEAD are allowed there
    base = "/file/%s" % urllib.quote(self._bar_txt_uri)
    d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                         "/file can only be used with GET or HEAD",
                         self.PUT, base + "/@@name=/blah.txt", "")

def test_GET_DIRURL_named_bad(self):
    # a directory cap under /file/ is not a file-cap and must be rejected
    # NOTE(review): the "which" string below says PUT -- looks like a
    # copy-paste slip; the test itself exercises GET
    base = "/file/%s" % urllib.quote(self._foo_uri)
    d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                         self.GET, base + "/@@name=/blah.txt")

def test_GET_slash_file_bad(self):
    # /file with no cap at all must fail
    d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                         "/file must be followed by a file-cap and a name",

def test_GET_unhandled_URI_named(self):
    contents, n, newuri = self.makefile(12)
    verifier_cap = n.get_verify_cap().to_string()
    base = "/file/%s" % urllib.quote(verifier_cap)
    # client.create_node_from_uri() can't handle verify-caps
    d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                         "400 Bad Request", "is not a file-cap",

def test_GET_unhandled_URI(self):
    contents, n, newuri = self.makefile(12)
    verifier_cap = n.get_verify_cap().to_string()
    base = "/uri/%s" % urllib.quote(verifier_cap)
    # client.create_node_from_uri() can't handle verify-caps
    d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                         "GET unknown URI type: can only do t=info",

def test_GET_FILE_URI(self):
    # fetching directly by /uri/<filecap> returns the file contents
    base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
    d.addCallback(self.failUnlessIsBarDotTxt)

def test_GET_FILE_URI_badchild(self):
    # files have no children: asking for one is a 400
    base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
    errmsg = "Files have no children, certainly not named 'boguschild'"
    d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                         "400 Bad Request", errmsg,

def test_PUT_FILE_URI_badchild(self):
    # NOTE(review): the "which" string below says GET -- copy-paste slip
    base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
    errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
    d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                         "400 Bad Request", errmsg,

# TODO: version of this with a Unicode filename
def test_GET_FILEURL_save(self):
    # save=true adds a Content-Disposition: attachment header
    d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
                 return_response=True)
    def _got((res, statuscode, headers)):
        content_disposition = headers["content-disposition"][0]
        self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
        self.failUnlessIsBarDotTxt(res)

def test_GET_FILEURL_missing(self):
    # GET of a nonexistent child is a 404
    d = self.GET(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_GET_FILEURL_missing")
# NOTE(review): across the tests below, various continuation lines (filecap
# arguments, "to replace it..." message tails, headers= arguments) and the
# trailing `return d` lines are elided in this excerpt.
def test_PUT_overwrite_only_files(self):
    # create a directory, put a file in that directory.
    contents, n, filecap = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/dir/file1.txt",
                           self.NEWFILE_CONTENTS))
    # try to overwrite the file with replace=only-files
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
    # replace=only-files must NOT allow overwriting a directory
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                   "There was already a child by that name, and you asked me "
                                   self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",

def test_PUT_NEWFILEURL(self):
    d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessEqual(responsecode, 201)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_not_mutable(self):
    # mutable=false is the default; result should match plain PUT
    d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                 self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessEqual(responsecode, 201)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_range_bad(self):
    # Content-Range on PUT is unsupported and must not create the child
    headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
    target = self.public_url + "/foo/new.txt"
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                         "501 Not Implemented",
                         "Content-Range in PUT not yet supported",
                         # (and certainly not for immutable files)
                         self.PUT, target, self.NEWFILE_CONTENTS[1:11],
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"new.txt"))

def test_PUT_NEWFILEURL_mutable(self):
    d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                 self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessEqual(responsecode, 201)
    # response body should be a writeable mutable-file cap
    u = uri.from_string_mutable_filenode(res)
    self.failUnless(u.is_mutable())
    self.failIf(u.is_readonly())
    d.addCallback(_check_uri)
    d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(self._foo_node,
                                                         self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_mutable_toobig(self):
    # one byte over the SDMF single-segment limit must be rejected
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.public_url + "/foo/new.txt?mutable=true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1))

def test_PUT_NEWFILEURL_replace(self):
    # PUT over an existing child silently replaces it by default
    d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessEqual(responsecode, 200)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_bad_t(self):
    d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                         "PUT to a file: bad t=bogus",
                         self.PUT, self.public_url + "/foo/bar.txt?t=bogus",

def test_PUT_NEWFILEURL_no_replace(self):
    # replace=false must refuse to overwrite an existing child
    d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                 self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
              "There was already a child by that name, and you asked me "

def test_PUT_NEWFILEURL_mkdirs(self):
    # intermediate directories are created on demand
    d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
    d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
    d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_blocked(self):
    # a file in the path blocks implicit directory creation
    d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                 self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
              "Unable to create directory 'blockingfile': a file was in the way")

def test_PUT_NEWFILEURL_emptyname(self):
    # an empty pathname component (i.e. a double-slash) is disallowed
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
                         "The webapi does not allow empty pathname components",
                         self.PUT, self.public_url + "/foo//new.txt", "")

def test_DELETE_FILEURL(self):
    d = self.DELETE(self.public_url + "/foo/bar.txt")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))

def test_DELETE_FILEURL_missing(self):
    d = self.DELETE(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_DELETE_FILEURL_missing")

def test_DELETE_FILEURL_missing2(self):
    # missing intermediate directory also 404s
    d = self.DELETE(self.public_url + "/missing/missing")
    d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
def failUnlessHasBarDotTxtMetadata(self, res):
    """Assert that the JSON in *res* carries bar.txt's tahoe link-metadata."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessIn("metadata", parsed[1])
    self.failUnlessIn("tahoe", parsed[1]["metadata"])
    tahoe_md = parsed[1]["metadata"]["tahoe"]
    self.failUnlessIn("linkcrtime", tahoe_md)
    self.failUnlessIn("linkmotime", tahoe_md)
    # link-creation time must match what setUp recorded for bar.txt
    self.failUnlessEqual(tahoe_md["linkcrtime"],
                         self._bar_txt_metadata["tahoe"]["linkcrtime"])
945 def test_GET_FILEURL_json(self):
946 # twisted.web.http.parse_qs ignores any query args without an '=', so
947 # I can't do "GET /path?json", I have to do "GET /path/t=json"
948 # instead. This may make it tricky to emulate the S3 interface
950 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
952 self.failUnlessIsBarJSON(data)
953 self.failUnlessHasBarDotTxtMetadata(data)
955 d.addCallback(_check1)
958 def test_GET_FILEURL_json_missing(self):
959 d = self.GET(self.public_url + "/foo/missing?json")
960 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
# GET ?t=uri and ?t=readonly-uri on an immutable file both return the
# file's cap; for immutable files the two are identical.
963 def test_GET_FILEURL_uri(self):
964 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
# NOTE(review): original line 965 ("def _check(res):") is missing here.
966 self.failUnlessEqual(res, self._bar_txt_uri)
967 d.addCallback(_check)
968 d.addCallback(lambda res:
969 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
# NOTE(review): original line 970 ("def _check2(res):") is missing here.
971 # for now, for files, uris and readonly-uris are the same
972 self.failUnlessEqual(res, self._bar_txt_uri)
973 d.addCallback(_check2)
976 def test_GET_FILEURL_badtype(self):
977 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
980 self.public_url + "/foo/bar.txt?t=bogus")
983 def test_GET_FILEURL_uri_missing(self):
984 d = self.GET(self.public_url + "/foo/missing?t=uri")
985 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
988 def test_GET_DIRURL(self):
989 # the addSlash means we get a redirect here
990 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
992 d = self.GET(self.public_url + "/foo", followRedirect=True)
994 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
996 # the FILE reference points to a URI, but it should end in bar.txt
997 bar_url = ("%s/file/%s/@@named=/bar.txt" %
998 (ROOT, urllib.quote(self._bar_txt_uri)))
999 get_bar = "".join([r'<td>FILE</td>',
1001 r'<a href="%s">bar.txt</a>' % bar_url,
1003 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
1005 self.failUnless(re.search(get_bar, res), res)
1006 for line in res.split("\n"):
1007 # find the line that contains the delete button for bar.txt
1008 if ("form action" in line and
1009 'value="delete"' in line and
1010 'value="bar.txt"' in line):
1011 # the form target should use a relative URL
1012 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1013 self.failUnless(('action="%s"' % foo_url) in line, line)
1014 # and the when_done= should too
1015 #done_url = urllib.quote(???)
1016 #self.failUnless(('name="when_done" value="%s"' % done_url)
1020 self.fail("unable to find delete-bar.txt line", res)
1022 # the DIR reference just points to a URI
1023 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1024 get_sub = ((r'<td>DIR</td>')
1025 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
1026 self.failUnless(re.search(get_sub, res), res)
1027 d.addCallback(_check)
1029 # look at a readonly directory
1030 d.addCallback(lambda res:
1031 self.GET(self.public_url + "/reedownlee", followRedirect=True))
1033 self.failUnless("(read-only)" in res, res)
1034 self.failIf("Upload a file" in res, res)
1035 d.addCallback(_check2)
1037 # and at a directory that contains a readonly directory
1038 d.addCallback(lambda res:
1039 self.GET(self.public_url, followRedirect=True))
1041 self.failUnless(re.search('<td>DIR-RO</td>'
1042 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1043 d.addCallback(_check3)
1045 # and an empty directory
1046 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1048 self.failUnless("directory is empty" in res, res)
1049 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1050 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1051 d.addCallback(_check4)
1053 # and at a literal directory
1054 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1055 d.addCallback(lambda res:
1056 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1058 self.failUnless('(immutable)' in res, res)
1059 self.failUnless(re.search('<td>FILE</td>'
1060 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1061 d.addCallback(_check5)
1064 def test_GET_DIRURL_badtype(self):
1065 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1069 self.public_url + "/foo?t=bogus")
1072 def test_GET_DIRURL_json(self):
1073 d = self.GET(self.public_url + "/foo?t=json")
1074 d.addCallback(self.failUnlessIsFooJSON)
1078 def test_POST_DIRURL_manifest_no_ophandle(self):
1079 d = self.shouldFail2(error.Error,
1080 "test_POST_DIRURL_manifest_no_ophandle",
1082 "slow operation requires ophandle=",
1083 self.POST, self.public_url, t="start-manifest")
1086 def test_POST_DIRURL_manifest(self):
1087 d = defer.succeed(None)
1088 def getman(ignored, output):
1089 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1090 followRedirect=True)
1091 d.addCallback(self.wait_for_operation, "125")
1092 d.addCallback(self.get_operation_results, "125", output)
1094 d.addCallback(getman, None)
1095 def _got_html(manifest):
1096 self.failUnless("Manifest of SI=" in manifest)
1097 self.failUnless("<td>sub</td>" in manifest)
1098 self.failUnless(self._sub_uri in manifest)
1099 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1100 d.addCallback(_got_html)
1102 # both t=status and unadorned GET should be identical
1103 d.addCallback(lambda res: self.GET("/operations/125"))
1104 d.addCallback(_got_html)
1106 d.addCallback(getman, "html")
1107 d.addCallback(_got_html)
1108 d.addCallback(getman, "text")
1109 def _got_text(manifest):
1110 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1111 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1112 d.addCallback(_got_text)
1113 d.addCallback(getman, "JSON")
1115 data = res["manifest"]
1117 for (path_list, cap) in data:
1118 got[tuple(path_list)] = cap
1119 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1120 self.failUnless((u"sub",u"baz.txt") in got)
1121 self.failUnless("finished" in res)
1122 self.failUnless("origin" in res)
1123 self.failUnless("storage-index" in res)
1124 self.failUnless("verifycaps" in res)
1125 self.failUnless("stats" in res)
1126 d.addCallback(_got_json)
1129 def test_POST_DIRURL_deepsize_no_ophandle(self):
1130 d = self.shouldFail2(error.Error,
1131 "test_POST_DIRURL_deepsize_no_ophandle",
1133 "slow operation requires ophandle=",
1134 self.POST, self.public_url, t="start-deep-size")
1137 def test_POST_DIRURL_deepsize(self):
1138 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1139 followRedirect=True)
1140 d.addCallback(self.wait_for_operation, "126")
1141 d.addCallback(self.get_operation_results, "126", "json")
1142 def _got_json(data):
1143 self.failUnlessEqual(data["finished"], True)
1145 self.failUnless(size > 1000)
1146 d.addCallback(_got_json)
1147 d.addCallback(self.get_operation_results, "126", "text")
1149 mo = re.search(r'^size: (\d+)$', res, re.M)
1150 self.failUnless(mo, res)
1151 size = int(mo.group(1))
1152 # with directories, the size varies.
1153 self.failUnless(size > 1000)
1154 d.addCallback(_got_text)
1157 def test_POST_DIRURL_deepstats_no_ophandle(self):
1158 d = self.shouldFail2(error.Error,
1159 "test_POST_DIRURL_deepstats_no_ophandle",
1161 "slow operation requires ophandle=",
1162 self.POST, self.public_url, t="start-deep-stats")
1165 def test_POST_DIRURL_deepstats(self):
1166 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1167 followRedirect=True)
1168 d.addCallback(self.wait_for_operation, "127")
1169 d.addCallback(self.get_operation_results, "127", "json")
1170 def _got_json(stats):
1171 expected = {"count-immutable-files": 3,
1172 "count-mutable-files": 0,
1173 "count-literal-files": 0,
1175 "count-directories": 3,
1176 "size-immutable-files": 57,
1177 "size-literal-files": 0,
1178 #"size-directories": 1912, # varies
1179 #"largest-directory": 1590,
1180 "largest-directory-children": 5,
1181 "largest-immutable-file": 19,
1183 for k,v in expected.iteritems():
1184 self.failUnlessEqual(stats[k], v,
1185 "stats[%s] was %s, not %s" %
1187 self.failUnlessEqual(stats["size-files-histogram"],
1189 d.addCallback(_got_json)
1192 def test_POST_DIRURL_stream_manifest(self):
1193 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1195 self.failUnless(res.endswith("\n"))
1196 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1197 self.failUnlessEqual(len(units), 7)
1198 self.failUnlessEqual(units[-1]["type"], "stats")
1200 self.failUnlessEqual(first["path"], [])
1201 self.failUnlessEqual(first["cap"], self._foo_uri)
1202 self.failUnlessEqual(first["type"], "directory")
1203 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1204 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1205 self.failIfEqual(baz["storage-index"], None)
1206 self.failIfEqual(baz["verifycap"], None)
1207 self.failIfEqual(baz["repaircap"], None)
1209 d.addCallback(_check)
# GET ?t=uri on a directory returns the directory's writecap.
1212 def test_GET_DIRURL_uri(self):
1213 d = self.GET(self.public_url + "/foo?t=uri")
# NOTE(review): original line 1214 ("def _check(res):") is missing here.
1215 self.failUnlessEqual(res, self._foo_uri)
1216 d.addCallback(_check)
# GET ?t=readonly-uri on a directory returns its read-only cap.
1219 def test_GET_DIRURL_readonly_uri(self):
1220 d = self.GET(self.public_url + "/foo?t=readonly-uri")
# NOTE(review): original line 1221 ("def _check(res):") is missing here.
1222 self.failUnlessEqual(res, self._foo_readonly_uri)
1223 d.addCallback(_check)
# PUT ?t=mkdir creates an empty directory as a new child of foo.
1226 def test_PUT_NEWDIRURL(self):
1227 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1228 d.addCallback(lambda res:
1229 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
# the new directory must exist and be empty
1230 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1231 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST ?t=mkdir behaves like the PUT variant: creates an empty child dir.
1234 def test_POST_NEWDIRURL(self):
1235 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1236 d.addCallback(lambda res:
1237 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1238 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1239 d.addCallback(self.failUnlessNodeKeysAre, [])
1242 def test_POST_NEWDIRURL_emptyname(self):
1243 # an empty pathname component (i.e. a double-slash) is disallowed
1244 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1246 "The webapi does not allow empty pathname components, i.e. a double slash",
1247 self.POST, self.public_url + "//?t=mkdir")
1250 def test_POST_NEWDIRURL_initial_children(self):
1251 (newkids, caps) = self._create_initial_children()
1252 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1253 simplejson.dumps(newkids))
1255 n = self.s.create_node_from_uri(uri.strip())
1256 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1257 d2.addCallback(lambda ign:
1258 self.failUnlessROChildURIIs(n, u"child-imm",
1260 d2.addCallback(lambda ign:
1261 self.failUnlessRWChildURIIs(n, u"child-mutable",
1263 d2.addCallback(lambda ign:
1264 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1266 d2.addCallback(lambda ign:
1267 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1268 caps['unknown_rocap']))
1269 d2.addCallback(lambda ign:
1270 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1271 caps['unknown_rwcap']))
1272 d2.addCallback(lambda ign:
1273 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1274 caps['unknown_immcap']))
1275 d2.addCallback(lambda ign:
1276 self.failUnlessRWChildURIIs(n, u"dirchild",
1278 d2.addCallback(lambda ign:
1279 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1281 d2.addCallback(lambda ign:
1282 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1283 caps['emptydircap']))
1285 d.addCallback(_check)
1286 d.addCallback(lambda res:
1287 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1288 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1289 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1290 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1291 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1294 def test_POST_NEWDIRURL_immutable(self):
1295 (newkids, caps) = self._create_immutable_children()
1296 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1297 simplejson.dumps(newkids))
1299 n = self.s.create_node_from_uri(uri.strip())
1300 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1301 d2.addCallback(lambda ign:
1302 self.failUnlessROChildURIIs(n, u"child-imm",
1304 d2.addCallback(lambda ign:
1305 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1306 caps['unknown_immcap']))
1307 d2.addCallback(lambda ign:
1308 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1310 d2.addCallback(lambda ign:
1311 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1313 d2.addCallback(lambda ign:
1314 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1315 caps['emptydircap']))
1317 d.addCallback(_check)
1318 d.addCallback(lambda res:
1319 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1320 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1321 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1322 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1323 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1324 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1325 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1326 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1327 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1328 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1329 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1330 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1331 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1332 d.addErrback(self.explain_web_error)
1335 def test_POST_NEWDIRURL_immutable_bad(self):
1336 (newkids, caps) = self._create_initial_children()
1337 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1339 "needed to be immutable but was not",
1341 self.public_url + "/foo/newdir?t=mkdir-immutable",
1342 simplejson.dumps(newkids))
1345 def test_PUT_NEWDIRURL_exists(self):
1346 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1347 d.addCallback(lambda res:
1348 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1349 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1350 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1353 def test_PUT_NEWDIRURL_blocked(self):
1354 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1355 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1357 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1358 d.addCallback(lambda res:
1359 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1360 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1361 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1364 def test_PUT_NEWDIRURL_mkdir_p(self):
1365 d = defer.succeed(None)
1366 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1367 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1368 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1369 def mkdir_p(mkpnode):
1370 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1372 def made_subsub(ssuri):
1373 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1374 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1376 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1378 d.addCallback(made_subsub)
1380 d.addCallback(mkdir_p)
1383 def test_PUT_NEWDIRURL_mkdirs(self):
1384 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1385 d.addCallback(lambda res:
1386 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1387 d.addCallback(lambda res:
1388 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1389 d.addCallback(lambda res:
1390 self._foo_node.get_child_at_path(u"subdir/newdir"))
1391 d.addCallback(self.failUnlessNodeKeysAre, [])
# DELETE on a directory URL unlinks the whole directory from its parent.
1394 def test_DELETE_DIRURL(self):
1395 d = self.DELETE(self.public_url + "/foo")
1396 d.addCallback(lambda res:
1397 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE of a missing child 404s and must leave the parent intact.
1400 def test_DELETE_DIRURL_missing(self):
1401 d = self.DELETE(self.public_url + "/foo/missing")
1402 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1403 d.addCallback(lambda res:
1404 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE of a directory that never existed must 404.
1407 def test_DELETE_DIRURL_missing2(self):
1408 d = self.DELETE(self.public_url + "/missing")
1409 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debugging helper: walk the public root and visit every child node.
# Not invoked by the tests themselves.
1412 def dump_root(self):
# NOTE(review): original lines 1413 and 1416 (likely print statements)
# and 1418 (likely "return d") are missing from this capture.
1414 w = webish.DirnodeWalkerMixin()
1415 def visitor(childpath, childnode, metadata):
1417 d = w.walk(self.public_root, visitor)
# Assert that a dirnode's children are exactly expected_keys (order-free).
# expected_keys must be unicode strings. Returns a Deferred.
1420 def failUnlessNodeKeysAre(self, node, expected_keys):
1421 for k in expected_keys:
1422 assert isinstance(k, unicode)
# NOTE(review): original line 1423 (presumably "d = node.list()") and
# line 1427 (presumably "return d") are missing from this capture.
1424 def _check(children):
1425 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1426 d.addCallback(_check)
# Assert that the dirnode has a child with the given (unicode) name.
1428 def failUnlessNodeHasChild(self, node, name):
1429 assert isinstance(name, unicode)
# NOTE(review): original line 1430 (presumably "d = node.list()") and
# line 1434 (presumably "return d") are missing from this capture.
1431 def _check(children):
1432 self.failUnless(name in children)
1433 d.addCallback(_check)
# Assert that the dirnode does NOT have a child with the given name.
1435 def failIfNodeHasChild(self, node, name):
1436 assert isinstance(name, unicode)
# NOTE(review): original line 1437 (presumably "d = node.list()") and
# line 1441 (presumably "return d") are missing from this capture.
1438 def _check(children):
1439 self.failIf(name in children)
1440 d.addCallback(_check)
# Download the (immutable) child at 'name' under 'node' and assert its
# bytes equal expected_contents. 'name' may be a slash-separated path.
1443 def failUnlessChildContentsAre(self, node, name, expected_contents):
1444 assert isinstance(name, unicode)
1445 d = node.get_child_at_path(name)
1446 d.addCallback(lambda node: download_to_data(node))
1447 def _check(contents):
1448 self.failUnlessEqual(contents, expected_contents)
1449 d.addCallback(_check)
# NOTE(review): original line 1450 (presumably "return d") is missing
# from this capture.
# Like failUnlessChildContentsAre, but for a mutable child: fetch its
# best recoverable version and compare against expected_contents.
1452 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1453 assert isinstance(name, unicode)
1454 d = node.get_child_at_path(name)
1455 d.addCallback(lambda node: node.download_best_version())
1456 def _check(contents):
1457 self.failUnlessEqual(contents, expected_contents)
1458 d.addCallback(_check)
# NOTE(review): original line 1459 (presumably "return d") is missing
# from this capture.
# Assert that the child at 'name' is writable (or unknown) and that its
# uri/write-uri equal expected_uri, and its read-only uri matches the
# derived read-only form.
1461 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1462 assert isinstance(name, unicode)
1463 d = node.get_child_at_path(name)
# NOTE(review): original line 1464 (presumably "def _check(child):"),
# line 1469 (likely a guard on expected_ro_uri) and line 1472
# (presumably "return d") are missing from this capture.
1465 self.failUnless(child.is_unknown() or not child.is_readonly())
1466 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1467 self.failUnlessEqual(child.get_write_uri(), expected_uri.strip())
1468 expected_ro_uri = self._make_readonly(expected_uri)
1470 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1471 d.addCallback(_check)
# Assert that the child at 'name' is read-only (or unknown): it has no
# write-uri and both uri and readonly-uri equal expected_uri.
1474 def failUnlessROChildURIIs(self, node, name, expected_uri):
1475 assert isinstance(name, unicode)
1476 d = node.get_child_at_path(name)
# NOTE(review): original line 1477 (presumably "def _check(child):") and
# line 1483 (presumably "return d") are missing from this capture.
1478 self.failUnless(child.is_unknown() or child.is_readonly())
1479 self.failUnlessEqual(child.get_write_uri(), None)
1480 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1481 self.failUnlessEqual(child.get_readonly_uri(), expected_uri.strip())
1482 d.addCallback(_check)
# Assert that got_uri (e.g. a webapi response body) names the writable
# child at 'name': uri and write-uri match, and the read-only uri matches
# the derived read-only form. Signature ordering suits addCallback use.
1485 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1486 assert isinstance(name, unicode)
1487 d = node.get_child_at_path(name)
# NOTE(review): original line 1488 (presumably "def _check(child):"),
# line 1493 (likely a guard on expected_ro_uri) and line 1496
# (presumably "return d") are missing from this capture.
1489 self.failUnless(child.is_unknown() or not child.is_readonly())
1490 self.failUnlessEqual(child.get_uri(), got_uri.strip())
1491 self.failUnlessEqual(child.get_write_uri(), got_uri.strip())
1492 expected_ro_uri = self._make_readonly(got_uri)
1494 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1495 d.addCallback(_check)
# Assert that got_uri names the read-only child at 'name': no write-uri,
# and both uri and readonly-uri equal got_uri.
1498 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1499 assert isinstance(name, unicode)
1500 d = node.get_child_at_path(name)
# NOTE(review): original line 1501 (presumably "def _check(child):") and
# line 1507 (presumably "return d") are missing from this capture.
1502 self.failUnless(child.is_unknown() or child.is_readonly())
1503 self.failUnlessEqual(child.get_write_uri(), None)
1504 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1505 self.failUnlessEqual(got_uri.strip(), child.get_readonly_uri())
1506 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    # Assert that the fake in-memory CHK grid stores exactly 'contents'
    # under the cap 'got_uri'. A missing cap raises KeyError, which is an
    # acceptable (loud) failure mode for a test helper.
    # failUnlessEqual (rather than failUnless(a == b)) so a mismatch
    # reports both values instead of just "False is not true".
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
# POST t=upload with a file field adds an immutable child to foo whose
# contents round-trip intact.
1512 def test_POST_upload(self):
1513 d = self.POST(self.public_url + "/foo", t="upload",
1514 file=("new.txt", self.NEWFILE_CONTENTS))
# NOTE(review): original line 1515, which presumably binds 'fn'
# (likely fn = self._foo_node), is missing from this capture.
1516 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1517 d.addCallback(lambda res:
1518 self.failUnlessChildContentsAre(fn, u"new.txt",
1519 self.NEWFILE_CONTENTS))
1522 def test_POST_upload_unicode(self):
1523 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1524 d = self.POST(self.public_url + "/foo", t="upload",
1525 file=(filename, self.NEWFILE_CONTENTS))
1527 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1528 d.addCallback(lambda res:
1529 self.failUnlessChildContentsAre(fn, filename,
1530 self.NEWFILE_CONTENTS))
1531 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1532 d.addCallback(lambda res: self.GET(target_url))
1533 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1534 self.NEWFILE_CONTENTS,
1538 def test_POST_upload_unicode_named(self):
1539 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1540 d = self.POST(self.public_url + "/foo", t="upload",
1542 file=("overridden", self.NEWFILE_CONTENTS))
1544 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1545 d.addCallback(lambda res:
1546 self.failUnlessChildContentsAre(fn, filename,
1547 self.NEWFILE_CONTENTS))
1548 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1549 d.addCallback(lambda res: self.GET(target_url))
1550 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1551 self.NEWFILE_CONTENTS,
1555 def test_POST_upload_no_link(self):
1556 d = self.POST("/uri", t="upload",
1557 file=("new.txt", self.NEWFILE_CONTENTS))
1558 def _check_upload_results(page):
1559 # this should be a page which describes the results of the upload
1560 # that just finished.
1561 self.failUnless("Upload Results:" in page)
1562 self.failUnless("URI:" in page)
1563 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1564 mo = uri_re.search(page)
1565 self.failUnless(mo, page)
1566 new_uri = mo.group(1)
1568 d.addCallback(_check_upload_results)
1569 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1572 def test_POST_upload_no_link_whendone(self):
1573 d = self.POST("/uri", t="upload", when_done="/",
1574 file=("new.txt", self.NEWFILE_CONTENTS))
1575 d.addBoth(self.shouldRedirect, "/")
# Invoke 'callable', expect it to fail with error.PageRedirect, and hand
# the redirect's (statuscode, location) to 'checker'. If it returns
# normally instead of redirecting, fail the test.
1578 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1579 d = defer.maybeDeferred(callable, *args, **kwargs)
# NOTE(review): original line 1580 (the inner callback's "def" line),
# 1587 (the "% (which, res)" continuation) and 1588-1589 (hooking the
# callback and "return d") are missing from this capture.
1581 if isinstance(res, failure.Failure):
1582 res.trap(error.PageRedirect)
1583 statuscode = res.value.status
1584 target = res.value.location
1585 return checker(statuscode, target)
1586 self.fail("%s: callable was supposed to redirect, not return '%s'"
1591 def test_POST_upload_no_link_whendone_results(self):
1592 def check(statuscode, target):
1593 self.failUnlessEqual(statuscode, str(http.FOUND))
1594 self.failUnless(target.startswith(self.webish_url), target)
1595 return client.getPage(target, method="GET")
1596 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1598 self.POST, "/uri", t="upload",
1599 when_done="/uri/%(uri)s",
1600 file=("new.txt", self.NEWFILE_CONTENTS))
1601 d.addCallback(lambda res:
1602 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
1605 def test_POST_upload_no_link_mutable(self):
1606 d = self.POST("/uri", t="upload", mutable="true",
1607 file=("new.txt", self.NEWFILE_CONTENTS))
1608 def _check(filecap):
1609 filecap = filecap.strip()
1610 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1611 self.filecap = filecap
1612 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1613 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1614 n = self.s.create_node_from_uri(filecap)
1615 return n.download_best_version()
1616 d.addCallback(_check)
1618 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1619 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1620 d.addCallback(_check2)
1622 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1623 return self.GET("/file/%s" % urllib.quote(self.filecap))
1624 d.addCallback(_check3)
1626 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1627 d.addCallback(_check4)
1630 def test_POST_upload_no_link_mutable_toobig(self):
1631 d = self.shouldFail2(error.Error,
1632 "test_POST_upload_no_link_mutable_toobig",
1633 "413 Request Entity Too Large",
1634 "SDMF is limited to one segment, and 10001 > 10000",
1636 "/uri", t="upload", mutable="true",
1638 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1641 def test_POST_upload_mutable(self):
1642 # this creates a mutable file
1643 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1644 file=("new.txt", self.NEWFILE_CONTENTS))
1646 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1647 d.addCallback(lambda res:
1648 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1649 self.NEWFILE_CONTENTS))
1650 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1652 self.failUnless(IMutableFileNode.providedBy(newnode))
1653 self.failUnless(newnode.is_mutable())
1654 self.failIf(newnode.is_readonly())
1655 self._mutable_node = newnode
1656 self._mutable_uri = newnode.get_uri()
1659 # now upload it again and make sure that the URI doesn't change
1660 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1661 d.addCallback(lambda res:
1662 self.POST(self.public_url + "/foo", t="upload",
1664 file=("new.txt", NEWER_CONTENTS)))
1665 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1666 d.addCallback(lambda res:
1667 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1669 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1671 self.failUnless(IMutableFileNode.providedBy(newnode))
1672 self.failUnless(newnode.is_mutable())
1673 self.failIf(newnode.is_readonly())
1674 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1675 d.addCallback(_got2)
1677 # upload a second time, using PUT instead of POST
1678 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1679 d.addCallback(lambda res:
1680 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1681 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1682 d.addCallback(lambda res:
1683 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1686 # finally list the directory, since mutable files are displayed
1687 # slightly differently
1689 d.addCallback(lambda res:
1690 self.GET(self.public_url + "/foo/",
1691 followRedirect=True))
1692 def _check_page(res):
1693 # TODO: assert more about the contents
1694 self.failUnless("SSK" in res)
1696 d.addCallback(_check_page)
1698 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1700 self.failUnless(IMutableFileNode.providedBy(newnode))
1701 self.failUnless(newnode.is_mutable())
1702 self.failIf(newnode.is_readonly())
1703 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1704 d.addCallback(_got3)
1706 # look at the JSON form of the enclosing directory
1707 d.addCallback(lambda res:
1708 self.GET(self.public_url + "/foo/?t=json",
1709 followRedirect=True))
1710 def _check_page_json(res):
1711 parsed = simplejson.loads(res)
1712 self.failUnlessEqual(parsed[0], "dirnode")
1713 children = dict( [(unicode(name),value)
1715 in parsed[1]["children"].iteritems()] )
1716 self.failUnless("new.txt" in children)
1717 new_json = children["new.txt"]
1718 self.failUnlessEqual(new_json[0], "filenode")
1719 self.failUnless(new_json[1]["mutable"])
1720 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1721 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1722 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1723 d.addCallback(_check_page_json)
1725 # and the JSON form of the file
1726 d.addCallback(lambda res:
1727 self.GET(self.public_url + "/foo/new.txt?t=json"))
1728 def _check_file_json(res):
1729 parsed = simplejson.loads(res)
1730 self.failUnlessEqual(parsed[0], "filenode")
1731 self.failUnless(parsed[1]["mutable"])
1732 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1733 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1734 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1735 d.addCallback(_check_file_json)
1737 # and look at t=uri and t=readonly-uri
1738 d.addCallback(lambda res:
1739 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1740 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1741 d.addCallback(lambda res:
1742 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1743 def _check_ro_uri(res):
1744 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1745 self.failUnlessEqual(res, ro_uri)
1746 d.addCallback(_check_ro_uri)
1748 # make sure we can get to it from /uri/URI
1749 d.addCallback(lambda res:
1750 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1751 d.addCallback(lambda res:
1752 self.failUnlessEqual(res, NEW2_CONTENTS))
1754 # and that HEAD computes the size correctly
1755 d.addCallback(lambda res:
1756 self.HEAD(self.public_url + "/foo/new.txt",
1757 return_response=True))
1758 def _got_headers((res, status, headers)):
1759 self.failUnlessEqual(res, "")
1760 self.failUnlessEqual(headers["content-length"][0],
1761 str(len(NEW2_CONTENTS)))
1762 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1763 d.addCallback(_got_headers)
1765 # make sure that size errors are displayed correctly for overwrite
1766 d.addCallback(lambda res:
1767 self.shouldFail2(error.Error,
1768 "test_POST_upload_mutable-toobig",
1769 "413 Request Entity Too Large",
1770 "SDMF is limited to one segment, and 10001 > 10000",
1772 self.public_url + "/foo", t="upload",
1775 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1778 d.addErrback(self.dump_error)
1781 def test_POST_upload_mutable_toobig(self):
1782 d = self.shouldFail2(error.Error,
1783 "test_POST_upload_mutable_toobig",
1784 "413 Request Entity Too Large",
1785 "SDMF is limited to one segment, and 10001 > 10000",
1787 self.public_url + "/foo",
1788 t="upload", mutable="true",
1790 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# Diagnostic errback: print the HTTP response body hidden inside a
# twisted.web error.Error so trial's failure output is useful.
1793 def dump_error(self, f):
1794 # if the web server returns an error code (like 400 Bad Request),
1795 # web.client.getPage puts the HTTP response body into the .response
1796 # attribute of the exception object that it gives back. It does not
1797 # appear in the Failure's repr(), so the ERROR that trial displays
1798 # will be rather terse and unhelpful. addErrback this method to the
1799 # end of your chain to get more information out of these errors.
1800 if f.check(error.Error):
1801 print "web.error.Error:"
# NOTE(review): original line 1802 (likely "print f") and lines
# 1804-1805 (likely "return f" to re-propagate the Failure) are
# missing from this capture.
1803 print f.value.response
# POST t=upload with an existing child name ("bar.txt") replaces the old
# child by default (no replace=false), and the new contents stick.
1806 def test_POST_upload_replace(self):
1807 d = self.POST(self.public_url + "/foo", t="upload",
1808 file=("bar.txt", self.NEWFILE_CONTENTS))
# NOTE(review): original line 1809, which presumably binds 'fn'
# (likely fn = self._foo_node), is missing from this capture.
1810 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1811 d.addCallback(lambda res:
1812 self.failUnlessChildContentsAre(fn, u"bar.txt",
1813 self.NEWFILE_CONTENTS))
1816 def test_POST_upload_no_replace_ok(self):
1817 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1818 file=("new.txt", self.NEWFILE_CONTENTS))
1819 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1820 d.addCallback(lambda res: self.failUnlessEqual(res,
1821 self.NEWFILE_CONTENTS))
1824 def test_POST_upload_no_replace_queryarg(self):
1825 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1826 file=("bar.txt", self.NEWFILE_CONTENTS))
1827 d.addBoth(self.shouldFail, error.Error,
1828 "POST_upload_no_replace_queryarg",
1830 "There was already a child by that name, and you asked me "
1831 "to not replace it")
1832 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1833 d.addCallback(self.failUnlessIsBarDotTxt)
# Same as the queryarg variant, but replace=false is passed as a form
# field; the existing bar.txt must survive unchanged.
# (excerpt: some original lines are elided)
1836 def test_POST_upload_no_replace_field(self):
1837 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1838 file=("bar.txt", self.NEWFILE_CONTENTS))
1839 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1841 "There was already a child by that name, and you asked me "
1842 "to not replace it")
1843 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1844 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done=/THERE must cause a redirect to /THERE after the upload, and
# the new child must still land in the directory.
# (excerpt: the line binding `fn` and `return d` are elided)
1847 def test_POST_upload_whendone(self):
1848 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1849 file=("new.txt", self.NEWFILE_CONTENTS))
1850 d.addBoth(self.shouldRedirect, "/THERE")
1852 d.addCallback(lambda res:
1853 self.failUnlessChildContentsAre(fn, u"new.txt",
1854 self.NEWFILE_CONTENTS))
# t=upload with an explicit name= field (file data given without a
# filename); checks the returned URI and resulting contents.
# (excerpt: the line binding `fn` and `return d` are elided)
1857 def test_POST_upload_named(self):
1859 d = self.POST(self.public_url + "/foo", t="upload",
1860 name="new.txt", file=self.NEWFILE_CONTENTS)
1861 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1862 d.addCallback(lambda res:
1863 self.failUnlessChildContentsAre(fn, u"new.txt",
1864 self.NEWFILE_CONTENTS))
# A name= containing slashes must be rejected, and the directory's
# child list must be unchanged afterwards.
# (excerpt: the key-list tail and `return d` are elided)
1867 def test_POST_upload_named_badfilename(self):
1868 d = self.POST(self.public_url + "/foo", t="upload",
1869 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1870 d.addBoth(self.shouldFail, error.Error,
1871 "test_POST_upload_named_badfilename",
1873 "name= may not contain a slash",
1875 # make sure that nothing was added
1876 d.addCallback(lambda res:
1877 self.failUnlessNodeKeysAre(self._foo_node,
1878 [u"bar.txt", u"blockingfile",
1879 u"empty", u"n\u00fc.txt",
# t=check on a file URL: HTML report says "Healthy", when_done= redirects
# (302 FOUND), return_to= produces a "Return to file" link, and
# output=JSON yields parseable results with a storage-index.
# (excerpt: the `def _check`/`def _check3` headers and `return d` are elided)
1883 def test_POST_FILEURL_check(self):
1884 bar_url = self.public_url + "/foo/bar.txt"
1885 d = self.POST(bar_url, t="check")
1887 self.failUnless("Healthy :" in res)
1888 d.addCallback(_check)
1889 redir_url = "http://allmydata.org/TARGET"
1890 def _check2(statuscode, target):
1891 self.failUnlessEqual(statuscode, str(http.FOUND))
1892 self.failUnlessEqual(target, redir_url)
1893 d.addCallback(lambda res:
1894 self.shouldRedirect2("test_POST_FILEURL_check",
1898 when_done=redir_url))
1899 d.addCallback(lambda res:
1900 self.POST(bar_url, t="check", return_to=redir_url))
1902 self.failUnless("Healthy :" in res)
1903 self.failUnless("Return to file" in res)
1904 self.failUnless(redir_url in res)
1905 d.addCallback(_check3)
1907 d.addCallback(lambda res:
1908 self.POST(bar_url, t="check", output="JSON"))
1909 def _check_json(res):
1910 data = simplejson.loads(res)
1911 self.failUnless("storage-index" in data)
1912 self.failUnless(data["results"]["healthy"])
1913 d.addCallback(_check_json)
# t=check&repair=true on a file URL: same shape as test_POST_FILEURL_check
# (healthy report, when_done redirect, return_to link).
# (excerpt: the `def _check`/`def _check3` headers and `return d` are elided)
1917 def test_POST_FILEURL_check_and_repair(self):
1918 bar_url = self.public_url + "/foo/bar.txt"
1919 d = self.POST(bar_url, t="check", repair="true")
1921 self.failUnless("Healthy :" in res)
1922 d.addCallback(_check)
1923 redir_url = "http://allmydata.org/TARGET"
1924 def _check2(statuscode, target):
1925 self.failUnlessEqual(statuscode, str(http.FOUND))
1926 self.failUnlessEqual(target, redir_url)
1927 d.addCallback(lambda res:
1928 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1931 t="check", repair="true",
1932 when_done=redir_url))
1933 d.addCallback(lambda res:
1934 self.POST(bar_url, t="check", return_to=redir_url))
1936 self.failUnless("Healthy :" in res)
1937 self.failUnless("Return to file" in res)
1938 self.failUnless(redir_url in res)
1939 d.addCallback(_check3)
# t=check on a directory URL: healthy HTML report, when_done redirect,
# "Return to file/directory" link, and JSON output with storage-index.
# (excerpt: helper-function headers and `return d` are elided)
1942 def test_POST_DIRURL_check(self):
1943 foo_url = self.public_url + "/foo/"
1944 d = self.POST(foo_url, t="check")
1946 self.failUnless("Healthy :" in res, res)
1947 d.addCallback(_check)
1948 redir_url = "http://allmydata.org/TARGET"
1949 def _check2(statuscode, target):
1950 self.failUnlessEqual(statuscode, str(http.FOUND))
1951 self.failUnlessEqual(target, redir_url)
1952 d.addCallback(lambda res:
1953 self.shouldRedirect2("test_POST_DIRURL_check",
1957 when_done=redir_url))
1958 d.addCallback(lambda res:
1959 self.POST(foo_url, t="check", return_to=redir_url))
1961 self.failUnless("Healthy :" in res, res)
1962 self.failUnless("Return to file/directory" in res)
1963 self.failUnless(redir_url in res)
1964 d.addCallback(_check3)
1966 d.addCallback(lambda res:
1967 self.POST(foo_url, t="check", output="JSON"))
1968 def _check_json(res):
1969 data = simplejson.loads(res)
1970 self.failUnless("storage-index" in data)
1971 self.failUnless(data["results"]["healthy"])
1972 d.addCallback(_check_json)
# t=check&repair=true on a directory URL: healthy report, when_done
# redirect, and return_to link, mirroring the file-URL variant above.
# (excerpt: helper-function headers and `return d` are elided)
1976 def test_POST_DIRURL_check_and_repair(self):
1977 foo_url = self.public_url + "/foo/"
1978 d = self.POST(foo_url, t="check", repair="true")
1980 self.failUnless("Healthy :" in res, res)
1981 d.addCallback(_check)
1982 redir_url = "http://allmydata.org/TARGET"
1983 def _check2(statuscode, target):
1984 self.failUnlessEqual(statuscode, str(http.FOUND))
1985 self.failUnlessEqual(target, redir_url)
1986 d.addCallback(lambda res:
1987 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1990 t="check", repair="true",
1991 when_done=redir_url))
1992 d.addCallback(lambda res:
1993 self.POST(foo_url, t="check", return_to=redir_url))
1995 self.failUnless("Healthy :" in res)
1996 self.failUnless("Return to file/directory" in res)
1997 self.failUnless(redir_url in res)
1998 d.addCallback(_check3)
# Poll /operations/<ophandle>?t=status&output=JSON until the operation
# reports finished; stalls 1.0s between polls and recurses via Deferred.
# (excerpt: the GET call, the JSON-returning tail, and `return d` are elided)
2001 def wait_for_operation(self, ignored, ophandle):
2002 url = "/operations/" + ophandle
2003 url += "?t=status&output=JSON"
2006 data = simplejson.loads(res)
2007 if not data["finished"]:
2008 d = self.stall(delay=1.0)
2009 d.addCallback(self.wait_for_operation, ophandle)
# Fetch the results page for an operation handle; when output is "json"
# the body is parsed with simplejson, otherwise returned as-is (elided).
# (excerpt: the t=status query line, the GET, and the non-JSON return are elided)
2015 def get_operation_results(self, ignored, ophandle, output=None):
2016 url = "/operations/" + ophandle
2019 url += "&output=" + output
2022 if output and output.lower() == "json":
2023 return simplejson.loads(res)
# start-deep-check without an ophandle= must be rejected, because slow
# operations require a handle to poll.
# (excerpt: the expected-status argument and `return d` are elided)
2028 def test_POST_DIRURL_deepcheck_no_ophandle(self):
2029 d = self.shouldFail2(error.Error,
2030 "test_POST_DIRURL_deepcheck_no_ophandle",
2032 "slow operation requires ophandle=",
2033 self.POST, self.public_url, t="start-deep-check")
# Full deep-check flow: start-deep-check with ophandle=123 redirects
# (302) to /operations/123, then the JSON status reports 8/8 healthy
# objects, the HTML report shows the same counts, /operations/123/ with a
# trailing slash renders identically, an unknown SI under the handle is a
# 404, and per-storage-index JSON details are available.
# (excerpt: some interior lines and `return d` are elided)
2036 def test_POST_DIRURL_deepcheck(self):
2037 def _check_redirect(statuscode, target):
2038 self.failUnlessEqual(statuscode, str(http.FOUND))
2039 self.failUnless(target.endswith("/operations/123"))
2040 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
2041 self.POST, self.public_url,
2042 t="start-deep-check", ophandle="123")
2043 d.addCallback(self.wait_for_operation, "123")
2044 def _check_json(data):
2045 self.failUnlessEqual(data["finished"], True)
2046 self.failUnlessEqual(data["count-objects-checked"], 8)
2047 self.failUnlessEqual(data["count-objects-healthy"], 8)
2048 d.addCallback(_check_json)
2049 d.addCallback(self.get_operation_results, "123", "html")
2050 def _check_html(res):
2051 self.failUnless("Objects Checked: <span>8</span>" in res)
2052 self.failUnless("Objects Healthy: <span>8</span>" in res)
2053 d.addCallback(_check_html)
2055 d.addCallback(lambda res:
2056 self.GET("/operations/123/"))
2057 d.addCallback(_check_html) # should be the same as without the slash
2059 d.addCallback(lambda res:
2060 self.shouldFail2(error.Error, "one", "404 Not Found",
2061 "No detailed results for SI bogus",
2062 self.GET, "/operations/123/bogus"))
2064 foo_si = self._foo_node.get_storage_index()
2065 foo_si_s = base32.b2a(foo_si)
2066 d.addCallback(lambda res:
2067 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2068 def _check_foo_json(res):
2069 data = simplejson.loads(res)
2070 self.failUnlessEqual(data["storage-index"], foo_si_s)
2071 self.failUnless(data["results"]["healthy"])
2072 d.addCallback(_check_foo_json)
# Deep-check with repair=true on a healthy grid: all 8 objects healthy
# before and after, zero repairs attempted; verified via both the JSON
# status and the rendered HTML counters.
# (excerpt: some interior lines and `return d` are elided)
2075 def test_POST_DIRURL_deepcheck_and_repair(self):
2076 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2077 ophandle="124", output="json", followRedirect=True)
2078 d.addCallback(self.wait_for_operation, "124")
2079 def _check_json(data):
2080 self.failUnlessEqual(data["finished"], True)
2081 self.failUnlessEqual(data["count-objects-checked"], 8)
2082 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
2083 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
2084 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
2085 self.failUnlessEqual(data["count-repairs-attempted"], 0)
2086 self.failUnlessEqual(data["count-repairs-successful"], 0)
2087 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
2088 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
2089 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
2090 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
2091 d.addCallback(_check_json)
2092 d.addCallback(self.get_operation_results, "124", "html")
2093 def _check_html(res):
2094 self.failUnless("Objects Checked: <span>8</span>" in res)
2096 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2097 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2098 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2100 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2101 self.failUnless("Repairs Successful: <span>0</span>" in res)
2102 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2104 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2105 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2106 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2107 d.addCallback(_check_html)
# POST with an unrecognized t= to a file URL must 400 with
# "POST to file: bad t=bogus".
# (excerpt: the t="bogus" keyword line and `return d` are elided)
2110 def test_POST_FILEURL_bad_t(self):
2111 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2112 "POST to file: bad t=bogus",
2113 self.POST, self.public_url + "/foo/bar.txt",
# t=mkdir with name=newdir creates an empty subdirectory of /foo.
# (excerpt: the trailing `return d` is elided)
2117 def test_POST_mkdir(self): # return value?
2118 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2119 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2120 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir-with-children: the JSON request body supplies the new
# directory's initial children; verifies the keys and one child cap.
# (excerpt: the trailing `return d` is elided)
2123 def test_POST_mkdir_initial_children(self):
2124 (newkids, caps) = self._create_initial_children()
2125 d = self.POST2(self.public_url +
2126 "/foo?t=mkdir-with-children&name=newdir",
2127 simplejson.dumps(newkids))
2128 d.addCallback(lambda res:
2129 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2130 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2131 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2132 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2133 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# t=mkdir-immutable with all-immutable children: each child's read-only
# cap in the new directory must match the cap we supplied.
# (excerpt: the trailing `return d` is elided)
2136 def test_POST_mkdir_immutable(self):
2137 (newkids, caps) = self._create_immutable_children()
2138 d = self.POST2(self.public_url +
2139 "/foo?t=mkdir-immutable&name=newdir",
2140 simplejson.dumps(newkids))
2141 d.addCallback(lambda res:
2142 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2143 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2144 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2145 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2146 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2147 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2148 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2149 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2150 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2151 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2152 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2153 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2154 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
# t=mkdir-immutable must reject a child set containing mutable caps
# ("needed to be immutable but was not").
# (excerpt: status-code and POST2/url lines plus `return d` are elided)
2157 def test_POST_mkdir_immutable_bad(self):
2158 (newkids, caps) = self._create_initial_children()
2159 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2161 "needed to be immutable but was not",
2164 "/foo?t=mkdir-immutable&name=newdir",
2165 simplejson.dumps(newkids))
# POST to /foo/newdir?t=mkdir (name in the path, empty body) creates an
# empty subdirectory.
# (excerpt: the trailing `return d` is elided)
2168 def test_POST_mkdir_2(self):
2169 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2170 d.addCallback(lambda res:
2171 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2172 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2173 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir on a deeper path (/foo/bardir/newdir) must create the
# intermediate directory bardir as well.
# (excerpt: the trailing `return d` is elided)
2176 def test_POST_mkdirs_2(self):
2177 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2178 d.addCallback(lambda res:
2179 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2180 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2181 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2182 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST /uri?t=mkdir (no parent dir) returns a directory writecap in the
# response body; parsing it with uri.DirectoryURI validates the format.
# (excerpt: the trailing `return d` is elided)
2185 def test_POST_mkdir_no_parentdir_noredirect(self):
2186 d = self.POST("/uri?t=mkdir")
2187 def _after_mkdir(res):
2188 uri.DirectoryURI.init_from_string(res)
2189 d.addCallback(_after_mkdir)
# Same as above but with t=mkdir passed as a form field, as the welcome
# page's form does.
# (excerpt: the trailing `return d` is elided)
2192 def test_POST_mkdir_no_parentdir_noredirect2(self):
2193 # make sure form-based arguments (as on the welcome page) still work
2194 d = self.POST("/uri", t="mkdir")
2195 def _after_mkdir(res):
2196 uri.DirectoryURI.init_from_string(res)
2197 d.addCallback(_after_mkdir)
2198 d.addErrback(self.explain_web_error)
# redirect_to_result=true turns the mkdir response into a 303 redirect
# whose (unquoted) target starts with uri/URI:DIR2:.
# (excerpt: the trailing `return d` is elided)
2201 def test_POST_mkdir_no_parentdir_redirect(self):
2202 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2203 d.addBoth(self.shouldRedirect, None, statuscode='303')
2204 def _check_target(target):
2205 target = urllib.unquote(target)
2206 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2207 d.addCallback(_check_target)
# Form-field variant of the redirect_to_result=true mkdir test.
# (excerpt: the trailing `return d` is elided)
2210 def test_POST_mkdir_no_parentdir_redirect2(self):
2211 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2212 d.addBoth(self.shouldRedirect, None, statuscode='303')
2213 def _check_target(target):
2214 target = urllib.unquote(target)
2215 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2216 d.addCallback(_check_target)
2217 d.addErrback(self.explain_web_error)
# Convert a cap string to its read-only form via uri.from_string().
# NOTE(review): original lines 2222-2223 are elided here — presumably a
# guard returning None when get_readonly() yields None; confirm before editing.
2220 def _make_readonly(self, u):
2221 ro_uri = uri.from_string(u).get_readonly()
2224 return ro_uri.to_string()
# Build a mixed children-spec dict (immutable, mutable, unknown-cap and
# dirnode entries) for mkdir-with-children tests, plus a caps dict for
# later verification.
# (excerpt: the dict's closing brace line and the 'dircap' entry of the
# returned caps dict are elided)
2226 def _create_initial_children(self):
2227 contents, n, filecap1 = self.makefile(12)
2228 md1 = {"metakey1": "metavalue1"}
2229 filecap2 = make_mutable_file_uri()
2230 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2231 filecap3 = node3.get_readonly_uri()
2232 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2233 dircap = DirectoryNode(node4, None, None).get_uri()
2234 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2235 emptydircap = "URI:DIR2-LIT:"
2236 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2237 "ro_uri": self._make_readonly(filecap1),
2238 "metadata": md1, }],
2239 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2240 "ro_uri": self._make_readonly(filecap2)}],
2241 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2242 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2243 "ro_uri": unknown_rocap}],
2244 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2245 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2246 u"dirchild": ["dirnode", {"rw_uri": dircap,
2247 "ro_uri": self._make_readonly(dircap)}],
2248 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2249 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2251 return newkids, {'filecap1': filecap1,
2252 'filecap2': filecap2,
2253 'filecap3': filecap3,
2254 'unknown_rwcap': unknown_rwcap,
2255 'unknown_rocap': unknown_rocap,
2256 'unknown_immcap': unknown_immcap,
2258 'litdircap': litdircap,
2259 'emptydircap': emptydircap}
# Build an all-immutable children-spec dict (CHK file, unknown imm cap,
# immutable/LIT dirnodes) for t=mkdir-immutable tests, plus a caps dict.
# (excerpt: the dict's closing brace line is elided)
2261 def _create_immutable_children(self):
2262 contents, n, filecap1 = self.makefile(12)
2263 md1 = {"metakey1": "metavalue1"}
2264 tnode = create_chk_filenode("immutable directory contents\n"*10)
2265 dnode = DirectoryNode(tnode, None, None)
2266 assert not dnode.is_mutable()
2267 immdircap = dnode.get_uri()
2268 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2269 emptydircap = "URI:DIR2-LIT:"
2270 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2271 "metadata": md1, }],
2272 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2273 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2274 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2275 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2277 return newkids, {'filecap1': filecap1,
2278 'unknown_immcap': unknown_immcap,
2279 'immdircap': immdircap,
2280 'litdircap': litdircap,
2281 'emptydircap': emptydircap}
# /uri?t=mkdir-with-children returns a new directory cap; re-reading the
# node must show every supplied child with the caps we provided.
# (excerpt: several caps[...] argument lines, the return of d2, and
# `return d` are elided)
2283 def test_POST_mkdir_no_parentdir_initial_children(self):
2284 (newkids, caps) = self._create_initial_children()
2285 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2286 def _after_mkdir(res):
2287 self.failUnless(res.startswith("URI:DIR"), res)
2288 n = self.s.create_node_from_uri(res)
2289 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2290 d2.addCallback(lambda ign:
2291 self.failUnlessROChildURIIs(n, u"child-imm",
2293 d2.addCallback(lambda ign:
2294 self.failUnlessRWChildURIIs(n, u"child-mutable",
2296 d2.addCallback(lambda ign:
2297 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2299 d2.addCallback(lambda ign:
2300 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2301 caps['unknown_rwcap']))
2302 d2.addCallback(lambda ign:
2303 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2304 caps['unknown_rocap']))
2305 d2.addCallback(lambda ign:
2306 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2307 caps['unknown_immcap']))
2308 d2.addCallback(lambda ign:
2309 self.failUnlessRWChildURIIs(n, u"dirchild",
2312 d.addCallback(_after_mkdir)
# Plain t=mkdir must reject a request body containing children and point
# the caller at t=mkdir-with-children instead.
# (excerpt: the expected HTTP status line and `return d` are elided)
2315 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2316 # the regular /uri?t=mkdir operation is specified to ignore its body.
2317 # Only t=mkdir-with-children pays attention to it.
2318 (newkids, caps) = self._create_initial_children()
2319 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2321 "t=mkdir does not accept children=, "
2322 "try t=mkdir-with-children instead",
2323 self.POST2, "/uri?t=mkdir", # without children
2324 simplejson.dumps(newkids))
# An unknown t= on /uri must 400 with the list of accepted operations.
# (excerpt: the trailing `return d` is elided)
2327 def test_POST_noparent_bad(self):
2328 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2329 "/uri accepts only PUT, PUT?t=mkdir, "
2330 "POST?t=upload, and POST?t=mkdir",
2331 self.POST, "/uri?t=bogus")
# /uri?t=mkdir-immutable with immutable children: the returned directory
# must contain each child under its supplied read-only cap.
# (excerpt: several caps[...] argument lines, the return of d2, and
# `return d` are elided)
2334 def test_POST_mkdir_no_parentdir_immutable(self):
2335 (newkids, caps) = self._create_immutable_children()
2336 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2337 def _after_mkdir(res):
2338 self.failUnless(res.startswith("URI:DIR"), res)
2339 n = self.s.create_node_from_uri(res)
2340 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2341 d2.addCallback(lambda ign:
2342 self.failUnlessROChildURIIs(n, u"child-imm",
2344 d2.addCallback(lambda ign:
2345 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2346 caps['unknown_immcap']))
2347 d2.addCallback(lambda ign:
2348 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2350 d2.addCallback(lambda ign:
2351 self.failUnlessROChildURIIs(n, u"dirchild-lit",
2353 d2.addCallback(lambda ign:
2354 self.failUnlessROChildURIIs(n, u"dirchild-empty",
2355 caps['emptydircap']))
2357 d.addCallback(_after_mkdir)
# /uri?t=mkdir-immutable must reject a child set containing mutable
# caps ("needed to be immutable but was not").
# (excerpt: status-code and POST2 lines plus `return d` are elided)
2360 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2361 (newkids, caps) = self._create_initial_children()
2362 d = self.shouldFail2(error.Error,
2363 "test_POST_mkdir_no_parentdir_immutable_bad",
2365 "needed to be immutable but was not",
2367 "/uri?t=mkdir-immutable",
2368 simplejson.dumps(newkids))
# Scrape the welcome page's "Create a directory" form with a regex,
# replay it as a POST, and expect a 303 redirect.
# (excerpt: the GET of the welcome page, the regex flags line, the
# binding of `formt`, and `return d` are elided)
2371 def test_welcome_page_mkdir_button(self):
2372 # Fetch the welcome page.
2374 def _after_get_welcome_page(res):
2375 MKDIR_BUTTON_RE = re.compile(
2376 '<form action="([^"]*)" method="post".*?'
2377 '<input type="hidden" name="t" value="([^"]*)" />'
2378 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2379 '<input type="submit" value="Create a directory" />',
2381 mo = MKDIR_BUTTON_RE.search(res)
2382 formaction = mo.group(1)
2384 formaname = mo.group(3)
2385 formavalue = mo.group(4)
2386 return (formaction, formt, formaname, formavalue)
2387 d.addCallback(_after_get_welcome_page)
2388 def _after_parse_form(res):
2389 (formaction, formt, formaname, formavalue) = res
2390 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2391 d.addCallback(_after_parse_form)
2392 d.addBoth(self.shouldRedirect, None, statuscode='303')
# t=mkdir over an existing child name ("sub") replaces it with a fresh
# empty directory.
# (excerpt: the trailing `return d` is elided)
2395 def test_POST_mkdir_replace(self): # return value?
2396 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2397 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2398 d.addCallback(self.failUnlessNodeKeysAre, [])
# replace=false (query arg) must leave the existing "sub" directory —
# still containing baz.txt — in place.
# (excerpt: the expected-status argument and `return d` are elided)
2401 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2402 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2403 d.addBoth(self.shouldFail, error.Error,
2404 "POST_mkdir_no_replace_queryarg",
2406 "There was already a child by that name, and you asked me "
2407 "to not replace it")
2408 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2409 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Form-field variant of mkdir with replace=false; existing "sub" must
# survive with its baz.txt child.
# (excerpt: the replace="false" argument line and `return d` are elided)
2412 def test_POST_mkdir_no_replace_field(self): # return value?
2413 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2415 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2417 "There was already a child by that name, and you asked me "
2418 "to not replace it")
2419 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2420 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# when_done=/THERE as a form field: mkdir should redirect there and
# still create the empty directory.
# (excerpt: the trailing `return d` is elided)
2423 def test_POST_mkdir_whendone_field(self):
2424 d = self.POST(self.public_url + "/foo",
2425 t="mkdir", name="newdir", when_done="/THERE")
2426 d.addBoth(self.shouldRedirect, "/THERE")
2427 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2428 d.addCallback(self.failUnlessNodeKeysAre, [])
# when_done as a query argument: same redirect-then-verify flow as the
# form-field variant.
# (excerpt: the trailing `return d` is elided)
2431 def test_POST_mkdir_whendone_queryarg(self):
2432 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2433 t="mkdir", name="newdir")
2434 d.addBoth(self.shouldRedirect, "/THERE")
2435 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2436 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST with an unrecognized t= to a directory must 400 with
# "POST to a directory with bad t=BOGUS".
# (excerpt: the trailing `return d` is elided)
2439 def test_POST_bad_t(self):
2440 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2441 "POST to a directory with bad t=BOGUS",
2442 self.POST, self.public_url + "/foo", t="BOGUS")
# t=set_children: POST a raw JSON request body adding three children
# atomically; each must then resolve to the uploaded file's URI.
# The command_name parameter lets test_POST_set_children_with_hyphen
# reuse this body for the "set-children" spelling.
# (excerpt: much of the JSON body literal, the `def _then(res):` header,
# and `return d` are elided)
2445 def test_POST_set_children(self, command_name="set_children"):
2446 contents9, n9, newuri9 = self.makefile(9)
2447 contents10, n10, newuri10 = self.makefile(10)
2448 contents11, n11, newuri11 = self.makefile(11)
2451 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2454 "ctime": 1002777696.7564139,
2455 "mtime": 1002777696.7564139
2458 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2461 "ctime": 1002777696.7564139,
2462 "mtime": 1002777696.7564139
2465 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2468 "ctime": 1002777696.7564139,
2469 "mtime": 1002777696.7564139
2472 }""" % (newuri9, newuri10, newuri11)
2474 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2476 d = client.getPage(url, method="POST", postdata=reqbody)
2478 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2479 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2480 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2482 d.addCallback(_then)
2483 d.addErrback(self.dump_error)
def test_POST_set_children_with_hyphen(self):
    """The hyphenated spelling t=set-children must behave exactly like
    t=set_children; delegate to the parameterized test above."""
    return self.test_POST_set_children(command_name="set-children")
# t=uri links an existing file cap under a new name; verifies the URI
# and the linked contents.
# (excerpt: the contents argument and `return d` are elided)
2489 def test_POST_link_uri(self):
2490 contents, n, newuri = self.makefile(8)
2491 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2492 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2493 d.addCallback(lambda res:
2494 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# t=uri over an existing name replaces the old child with the new cap.
# (excerpt: the contents argument and `return d` are elided)
2498 def test_POST_link_uri_replace(self):
2499 contents, n, newuri = self.makefile(8)
2500 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2501 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2502 d.addCallback(lambda res:
2503 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# Linking an unknown (future-format) writecap must be rejected: unknown
# caps may not occupy a write slot.
# (excerpt: the expected-status argument and `return d` are elided)
2507 def test_POST_link_uri_unknown_bad(self):
2508 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
2509 d.addBoth(self.shouldFail, error.Error,
2510 "POST_link_uri_unknown_bad",
2512 "unknown cap in a write slot")
# An unknown read-only cap IS allowed as a link target.
# (excerpt: the trailing `return d` is elided)
2515 def test_POST_link_uri_unknown_ro_good(self):
2516 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
2517 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
# An unknown immutable cap is likewise allowed as a link target.
# (excerpt: the trailing `return d` is elided)
2520 def test_POST_link_uri_unknown_imm_good(self):
2521 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
2522 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
# t=uri with replace=false (query arg) over an existing name must fail
# and leave the original bar.txt intact.
# (excerpt: the expected-status argument and `return d` are elided)
2525 def test_POST_link_uri_no_replace_queryarg(self):
2526 contents, n, newuri = self.makefile(8)
2527 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2528 name="bar.txt", uri=newuri)
2529 d.addBoth(self.shouldFail, error.Error,
2530 "POST_link_uri_no_replace_queryarg",
2532 "There was already a child by that name, and you asked me "
2533 "to not replace it")
2534 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2535 d.addCallback(self.failUnlessIsBarDotTxt)
# Form-field variant of t=uri with replace=false; bar.txt must survive.
# (excerpt: the expected-status argument and `return d` are elided)
2538 def test_POST_link_uri_no_replace_field(self):
2539 contents, n, newuri = self.makefile(8)
2540 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2541 name="bar.txt", uri=newuri)
2542 d.addBoth(self.shouldFail, error.Error,
2543 "POST_link_uri_no_replace_field",
2545 "There was already a child by that name, and you asked me "
2546 "to not replace it")
2547 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2548 d.addCallback(self.failUnlessIsBarDotTxt)
# t=delete removes the named child; the directory listing must no longer
# contain bar.txt.
# (excerpt: the trailing `return d` is elided)
2551 def test_POST_delete(self):
2552 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2553 d.addCallback(lambda res: self._foo_node.list())
2554 def _check(children):
2555 self.failIf(u"bar.txt" in children)
2556 d.addCallback(_check)
# t=rename moves bar.txt to wibble.txt: the old name disappears, the new
# name serves the same bytes and the same JSON metadata.
# (excerpt: the trailing `return d` is elided)
2559 def test_POST_rename_file(self):
2560 d = self.POST(self.public_url + "/foo", t="rename",
2561 from_name="bar.txt", to_name='wibble.txt')
2562 d.addCallback(lambda res:
2563 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2564 d.addCallback(lambda res:
2565 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2566 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2567 d.addCallback(self.failUnlessIsBarDotTxt)
2568 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2569 d.addCallback(self.failUnlessIsBarJSON)
# Renaming a file to its own name is a no-op that must not lose the
# child or its contents.
# (excerpt: the trailing `return d` is elided)
2572 def test_POST_rename_file_redundant(self):
2573 d = self.POST(self.public_url + "/foo", t="rename",
2574 from_name="bar.txt", to_name='bar.txt')
2575 d.addCallback(lambda res:
2576 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2577 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2578 d.addCallback(self.failUnlessIsBarDotTxt)
2579 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2580 d.addCallback(self.failUnlessIsBarJSON)
# Rename may clobber an existing child of a different kind: bar.txt
# replaces the "empty" directory, and /foo/empty then serves bar's bytes.
# (excerpt: the trailing `return d` is elided)
2583 def test_POST_rename_file_replace(self):
2584 # rename a file and replace a directory with it
2585 d = self.POST(self.public_url + "/foo", t="rename",
2586 from_name="bar.txt", to_name='empty')
2587 d.addCallback(lambda res:
2588 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2589 d.addCallback(lambda res:
2590 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2591 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2592 d.addCallback(self.failUnlessIsBarDotTxt)
2593 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2594 d.addCallback(self.failUnlessIsBarJSON)
# replace=false (query arg) blocks a rename onto an existing name; the
# target directory "empty" must remain an empty dirnode.
# (excerpt: the expected-status argument and `return d` are elided)
2597 def test_POST_rename_file_no_replace_queryarg(self):
2598 # rename a file and replace a directory with it
2599 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2600 from_name="bar.txt", to_name='empty')
2601 d.addBoth(self.shouldFail, error.Error,
2602 "POST_rename_file_no_replace_queryarg",
2604 "There was already a child by that name, and you asked me "
2605 "to not replace it")
2606 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2607 d.addCallback(self.failUnlessIsEmptyJSON)
# Form-field variant of rename with replace=false; "empty" must remain
# an empty dirnode.
# (excerpt: the expected-status argument and `return d` are elided)
2610 def test_POST_rename_file_no_replace_field(self):
2611 # rename a file and replace a directory with it
2612 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2613 from_name="bar.txt", to_name='empty')
2614 d.addBoth(self.shouldFail, error.Error,
2615 "POST_rename_file_no_replace_field",
2617 "There was already a child by that name, and you asked me "
2618 "to not replace it")
2619 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2620 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that `res` is the t=json rendering of an empty dirnode."""
    parsed = simplejson.loads(res)
    self.failUnlessEqual(parsed[0], "dirnode", parsed)
    self.failUnlessEqual(len(parsed[1]["children"]), 0)
# to_name containing a slash must be rejected, and bar.txt must remain
# in the directory.
# (excerpt: the expected-status argument, a trailing-argument line, and
# `return d` are elided)
2628 def test_POST_rename_file_slash_fail(self):
2629 d = self.POST(self.public_url + "/foo", t="rename",
2630 from_name="bar.txt", to_name='kirk/spock.txt')
2631 d.addBoth(self.shouldFail, error.Error,
2632 "test_POST_rename_file_slash_fail",
2634 "to_name= may not contain a slash",
2636 d.addCallback(lambda res:
2637 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# Renaming a directory: /foo becomes /plunk in the public root, and the
# renamed node still serves foo's JSON listing.
# (excerpt: the trailing `return d` is elided)
2640 def test_POST_rename_dir(self):
2641 d = self.POST(self.public_url, t="rename",
2642 from_name="foo", to_name='plunk')
2643 d.addCallback(lambda res:
2644 self.failIfNodeHasChild(self.public_root, u"foo"))
2645 d.addCallback(lambda res:
2646 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2647 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2648 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """Assert that `res` is a PageRedirect failure and return its location.

    A non-Failure `res` (an actual page) fails the test. When `target` is
    given, the redirect must point at webish_url + target; when
    `statuscode` is given, the redirect must use that HTTP status code.
    """
    if not isinstance(res, failure.Failure):
        destination = "somewhere" if target is None else ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, destination, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # PageRedirect does not seem to capture the uri= query arg, so we
        # can only compare against the base location.
        expected_location = self.webish_url + target
        self.failUnlessEqual(res.value.location, expected_location,
                             "%s: wrong target" % which)
    return res.value.location
# GET /uri?uri=<cap> must redirect to /uri/<quoted-cap>, preserving
# filename= and t= query args; following the redirect serves the file,
# and a directory cap with t=json serves the directory listing.
# (excerpt: the initial `d = self.GET(base)` and `return d` are elided)
2671 def test_GET_URI_form(self):
2672 base = "/uri?uri=%s" % self._bar_txt_uri
2673 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2674 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2676 d.addBoth(self.shouldRedirect, targetbase)
2677 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2678 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2679 d.addCallback(lambda res: self.GET(base+"&t=json"))
2680 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2681 d.addCallback(self.log, "about to get file by uri")
2682 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2683 d.addCallback(self.failUnlessIsBarDotTxt)
2684 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2685 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2686 followRedirect=True))
2687 d.addCallback(self.failUnlessIsFooJSON)
2688 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= argument must 400 ("GET /uri requires uri=").
# (excerpt: the self.GET target arguments and `return d` are elided)
2692 def test_GET_URI_form_bad(self):
2693 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2694 "400 Bad Request", "GET /uri requires uri=",
# t=rename-form renders the rename HTML form pre-filled with the
# from_name and a when_done of ".".
# (excerpt: the `def _check(res):` header and `return d` are elided)
2698 def test_GET_rename_form(self):
2699 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2700 followRedirect=True)
2702 self.failUnless('name="when_done" value="."' in res, res)
2703 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2704 d.addCallback(_check)
# Passthrough logging callback for Deferred chains.
# NOTE(review): this excerpt elides the method body (presumably a
# log.msg call and `return res`) — confirm against the full file.
2707 def log(self, res, msg):
2708 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/<filecap> serves the file bytes, with or without filename=
# and save=true query args.
# (excerpt: the initial `d = self.GET(base)` and `return d` are elided)
2712 def test_GET_URI_URL(self):
2713 base = "/uri/%s" % self._bar_txt_uri
2715 d.addCallback(self.failUnlessIsBarDotTxt)
2716 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2717 d.addCallback(self.failUnlessIsBarDotTxt)
2718 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2719 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/DIRCAP?t=json returns the directory's JSON representation.
# NOTE(review): excerpt omits the `d = self.GET(base)` and `return d` lines.
2722 def test_GET_URI_URL_dir(self):
2723 base = "/uri/%s?t=json" % self._foo_uri
2725 d.addCallback(self.failUnlessIsFooJSON)
# Fetching a cap whose shares are unavailable must produce http.GONE with
# a NotEnoughSharesError explanation.
# NOTE(review): excerpt omits the shouldHTTPError callable args and `return d`.
2728 def test_GET_URI_URL_missing(self):
2729 base = "/uri/%s" % self._bad_file_uri
2730 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2731 http.GONE, None, "NotEnoughSharesError",
2733 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2734 # here? we must arrange for a download to fail after target.open()
2735 # has been called, and then inspect the response to see that it is
2736 # shorter than we expected.
# PUT /foo?t=uri with a fresh dircap must replace the /foo child with the
# new directory, echo the cap back, and leave the parent pointing at it.
# NOTE(review): excerpt omits the `def _made_dir(dn):` header, the
# failUnlessRWChildURIIs argument tail, and the trailing `return d`.
2739 def test_PUT_DIRURL_uri(self):
2740 d = self.s.create_dirnode()
2742 new_uri = dn.get_uri()
2743 # replace /foo with a new (empty) directory
2744 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2745 d.addCallback(lambda res:
2746 self.failUnlessEqual(res.strip(), new_uri))
2747 d.addCallback(lambda res:
2748 self.failUnlessRWChildURIIs(self.public_root,
2752 d.addCallback(_made_dir)
# PUT /foo?t=uri&replace=false must 409 Conflict because /foo already
# exists, and must leave the existing child untouched.
# NOTE(review): excerpt omits the `def _made_dir(dn):` header and argument
# tails; verify against the full file.
2755 def test_PUT_DIRURL_uri_noreplace(self):
2756 d = self.s.create_dirnode()
2758 new_uri = dn.get_uri()
2759 # replace /foo with a new (empty) directory, but ask that
2760 # replace=false, so it should fail
2761 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2762 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2764 self.public_url + "/foo?t=uri&replace=false",
2766 d.addCallback(lambda res:
2767 self.failUnlessRWChildURIIs(self.public_root,
2771 d.addCallback(_made_dir)
# PUT to a directory with an unrecognized t= value must 400, without
# disturbing the existing child.
# NOTE(review): excerpt omits the failUnlessRWChildURIIs tail and `return d`.
2774 def test_PUT_DIRURL_bad_t(self):
2775 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2776 "400 Bad Request", "PUT to a directory",
2777 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2778 d.addCallback(lambda res:
2779 self.failUnlessRWChildURIIs(self.public_root,
# PUT <dir>/new.txt?t=uri attaches an existing filecap as a new child;
# the response echoes the cap and the child serves the stashed contents.
# NOTE(review): excerpt omits the contents argument tail and `return d`.
2784 def test_PUT_NEWFILEURL_uri(self):
2785 contents, n, new_uri = self.makefile(8)
2786 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2787 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2788 d.addCallback(lambda res:
2789 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# PUT ?t=uri over an existing child (bar.txt) replaces it by default.
# NOTE(review): excerpt omits the contents argument tail and `return d`.
2793 def test_PUT_NEWFILEURL_uri_replace(self):
2794 contents, n, new_uri = self.makefile(8)
2795 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2796 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2797 d.addCallback(lambda res:
2798 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# PUT ?t=uri&replace=false over an existing child must fail with the
# "already a child by that name" error instead of replacing it.
# NOTE(review): excerpt omits the expected status-code line and `return d`.
2802 def test_PUT_NEWFILEURL_uri_no_replace(self):
2803 contents, n, new_uri = self.makefile(8)
2804 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2805 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2807 "There was already a child by that name, and you asked me "
2808 "to not replace it")
# Linking an unknown (future-format) *writecap* via t=uri must be refused:
# the server cannot prove it is safe to put in a write slot.
# NOTE(review): excerpt omits the expected status line and `return d`.
2811 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
2812 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
2813 d.addBoth(self.shouldFail, error.Error,
2814 "POST_put_uri_unknown_bad",
2816 "unknown cap in a write slot")
# An unknown cap with an explicit "ro." prefix is acceptable as a
# read-only child.
# NOTE(review): excerpt omits the trailing `return d`.
2819 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
2820 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
2821 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2822 u"put-future-ro.txt")
# An unknown cap with an "imm." prefix is likewise acceptable as a
# read-only (immutable) child.
# NOTE(review): excerpt omits the trailing `return d`.
2825 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
2826 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
2827 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2828 u"put-future-imm.txt")
# PUT /uri (no t=) uploads an unlinked immutable file, returns its cap,
# and the cap must round-trip through GET /uri/<cap>.
# NOTE(review): excerpt omits the `def _check(uri):` / `def _check2(res):`
# headers and the trailing `return d`.
2831 def test_PUT_NEWFILE_URI(self):
2832 file_contents = "New file contents here\n"
2833 d = self.PUT("/uri", file_contents)
2835 assert isinstance(uri, str), uri
2836 self.failUnless(uri in FakeCHKFileNode.all_contents)
2837 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2839 return self.GET("/uri/%s" % uri)
2840 d.addCallback(_check)
2842 self.failUnlessEqual(res, file_contents)
2843 d.addCallback(_check2)
# Same as test_PUT_NEWFILE_URI, but with an explicit mutable=false query
# argument — still produces an immutable CHK cap.
# NOTE(review): excerpt omits the nested `def _check...` headers and `return d`.
2846 def test_PUT_NEWFILE_URI_not_mutable(self):
2847 file_contents = "New file contents here\n"
2848 d = self.PUT("/uri?mutable=false", file_contents)
2850 assert isinstance(uri, str), uri
2851 self.failUnless(uri in FakeCHKFileNode.all_contents)
2852 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2854 return self.GET("/uri/%s" % uri)
2855 d.addCallback(_check)
2857 self.failUnlessEqual(res, file_contents)
2858 d.addCallback(_check2)
# PUT /uri with an unsupported t= value must be rejected with the
# "accepts only ..." error message.
# NOTE(review): excerpt omits the expected status line and `return d`.
2861 def test_PUT_NEWFILE_URI_only_PUT(self):
2862 d = self.PUT("/uri?t=bogus", "")
2863 d.addBoth(self.shouldFail, error.Error,
2864 "PUT_NEWFILE_URI_only_PUT",
2866 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true creates an unlinked *mutable* file: the returned
# cap must be an SSK writecap, its storage index must be registered, and
# both node download and webapi GET must yield the original contents.
# NOTE(review): excerpt omits the `def _check2(data):` / `def _check3(res):`
# headers and the trailing `return d`.
2869 def test_PUT_NEWFILE_URI_mutable(self):
2870 file_contents = "New file contents here\n"
2871 d = self.PUT("/uri?mutable=true", file_contents)
2872 def _check1(filecap):
2873 filecap = filecap.strip()
2874 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2875 self.filecap = filecap
2876 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2877 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
2878 n = self.s.create_node_from_uri(filecap)
2879 return n.download_best_version()
2880 d.addCallback(_check1)
2882 self.failUnlessEqual(data, file_contents)
2883 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2884 d.addCallback(_check2)
2886 self.failUnlessEqual(res, file_contents)
2887 d.addCallback(_check3)
# PUT /uri?t=mkdir creates an unlinked empty directory; the returned cap
# must resolve to a node with no children and render as empty JSON.
# NOTE(review): excerpt omits the `def _check(uri):` header, the inner
# `return d2`, and the trailing `return d`.
2890 def test_PUT_mkdir(self):
2891 d = self.PUT("/uri?t=mkdir", "")
2893 n = self.s.create_node_from_uri(uri.strip())
2894 d2 = self.failUnlessNodeKeysAre(n, [])
2895 d2.addCallback(lambda res:
2896 self.GET("/uri/%s?t=json" % uri))
2898 d.addCallback(_check)
2899 d.addCallback(self.failUnlessIsEmptyJSON)
# POST t=check on a directory child should run a checker pass; with fake
# filenodes there is little to assert about the result itself.
# NOTE(review): excerpt omits the `def _done(res):` header and `return d`.
2902 def test_POST_check(self):
2903 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2905 # this returns a string form of the results, which are probably
2906 # None since we're using fake filenodes.
2907 # TODO: verify that the check actually happened, by changing
2908 # FakeCHKFileNode to count how many times .check() has been
2911 d.addCallback(_done)
# An HTTP method the webapi does not implement must return 501.
# NOTE(review): excerpt omits the trailing `return d`.
2914 def test_bad_method(self):
2915 url = self.webish_url + self.public_url + "/foo/bar.txt"
2916 d = self.shouldHTTPError("test_bad_method",
2917 501, "Not Implemented",
2918 "I don't know how to treat a BOGUS request.",
2919 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri resource is not supported and must 501.
# NOTE(review): excerpt omits the trailing `return d`.
2922 def test_short_url(self):
2923 url = self.webish_url + "/uri"
2924 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2925 "I don't know how to treat a DELETE request.",
2926 client.getPage, url, method="DELETE")
# Querying the status of an ophandle that was never created must 404.
# NOTE(review): excerpt omits the trailing `return d`.
2929 def test_ophandle_bad(self):
2930 url = self.webish_url + "/operations/bogus?t=status"
2931 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2932 "unknown/expired handle 'bogus'",
2933 client.getPage, url)
# Start a manifest operation under ophandle 128, confirm its status is
# queryable, then t=cancel it: the monitor must report cancelled and the
# handle must subsequently be unknown (404).
# NOTE(review): excerpt omits the `def _check1(res):` / `def _check2(res):`
# headers, the inner `return d`, the GET callable args, and final `return d`.
2936 def test_ophandle_cancel(self):
2937 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2938 followRedirect=True)
2939 d.addCallback(lambda ignored:
2940 self.GET("/operations/128?t=status&output=JSON"))
2942 data = simplejson.loads(res)
2943 self.failUnless("finished" in data, res)
2944 monitor = self.ws.root.child_operations.handles["128"][0]
2945 d = self.POST("/operations/128?t=cancel&output=JSON")
2947 data = simplejson.loads(res)
2948 self.failUnless("finished" in data, res)
2949 # t=cancel causes the handle to be forgotten
2950 self.failUnless(monitor.is_cancelled())
2951 d.addCallback(_check2)
2953 d.addCallback(_check1)
2954 d.addCallback(lambda ignored:
2955 self.shouldHTTPError("test_ophandle_cancel",
2956 404, "404 Not Found",
2957 "unknown/expired handle '128'",
2959 "/operations/128?t=status&output=JSON"))
# retain-for=0 on a status query shortens the handle's lifetime: after
# advancing the (fake) clock by 2s the handle must be expired (404).
# NOTE(review): excerpt omits the `def _check1(res):` header, the GET
# callable args, and the trailing `return d`. Relies on self.clock being
# a twisted.internet.task.Clock — presumably installed in setUp; verify.
2962 def test_ophandle_retainfor(self):
2963 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2964 followRedirect=True)
2965 d.addCallback(lambda ignored:
2966 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2968 data = simplejson.loads(res)
2969 self.failUnless("finished" in data, res)
2970 d.addCallback(_check1)
2971 # the retain-for=0 will cause the handle to be expired very soon
2972 d.addCallback(lambda ign:
2973 self.clock.advance(2.0))
2974 d.addCallback(lambda ignored:
2975 self.shouldHTTPError("test_ophandle_retainfor",
2976 404, "404 Not Found",
2977 "unknown/expired handle '129'",
2979 "/operations/129?t=status&output=JSON"))
# release-after-complete=true on a finished operation's status query
# releases the handle immediately; the next query must 404.
# NOTE(review): excerpt omits the GET callable args and `return d`.
2982 def test_ophandle_release_after_complete(self):
2983 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2984 followRedirect=True)
2985 d.addCallback(self.wait_for_operation, "130")
2986 d.addCallback(lambda ignored:
2987 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2988 # the release-after-complete=true will cause the handle to be expired
2989 d.addCallback(lambda ignored:
2990 self.shouldHTTPError("test_ophandle_release_after_complete",
2991 404, "404 Not Found",
2992 "unknown/expired handle '130'",
2994 "/operations/130?t=status&output=JSON"))
# Never-collected ophandles must survive until just under 4 days (96h)
# on the fake clock, and be expired (404) once 96h have fully elapsed.
# NOTE(review): excerpt omits the inner `return d` of the helper, the
# `def _check1(res):` header, the GET callable args, and final `return d`.
2997 def test_uncollected_ophandle_expiration(self):
2998 # uncollected ophandles should expire after 4 days
2999 def _make_uncollected_ophandle(ophandle):
3000 d = self.POST(self.public_url +
3001 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3002 followRedirect=False)
3003 # When we start the operation, the webapi server will want
3004 # to redirect us to the page for the ophandle, so we get
3005 # confirmation that the operation has started. If the
3006 # manifest operation has finished by the time we get there,
3007 # following that redirect (by setting followRedirect=True
3008 # above) has the side effect of collecting the ophandle that
3009 # we've just created, which means that we can't use the
3010 # ophandle to test the uncollected timeout anymore. So,
3011 # instead, catch the 302 here and don't follow it.
3012 d.addBoth(self.should302, "uncollected_ophandle_creation")
3014 # Create an ophandle, don't collect it, then advance the clock by
3015 # 4 days - 1 second and make sure that the ophandle is still there.
3016 d = _make_uncollected_ophandle(131)
3017 d.addCallback(lambda ign:
3018 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
3019 d.addCallback(lambda ign:
3020 self.GET("/operations/131?t=status&output=JSON"))
3022 data = simplejson.loads(res)
3023 self.failUnless("finished" in data, res)
3024 d.addCallback(_check1)
3025 # Create an ophandle, don't collect it, then try to collect it
3026 # after 4 days. It should be gone.
3027 d.addCallback(lambda ign:
3028 _make_uncollected_ophandle(132))
3029 d.addCallback(lambda ign:
3030 self.clock.advance(96*60*60))
3031 d.addCallback(lambda ign:
3032 self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
3033 404, "404 Not Found",
3034 "unknown/expired handle '132'",
3036 "/operations/132?t=status&output=JSON"))
# Already-collected ophandles have the shorter 1-day lifetime: alive at
# 24h-1s on the fake clock, expired (404) at a full 24h.
# NOTE(review): excerpt omits the helper's `return d`, the
# `def _check1(res):` header, the GET callable args, and final `return d`.
3039 def test_collected_ophandle_expiration(self):
3040 # collected ophandles should expire after 1 day
3041 def _make_collected_ophandle(ophandle):
3042 d = self.POST(self.public_url +
3043 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3044 followRedirect=True)
3045 # By following the initial redirect, we collect the ophandle
3046 # we've just created.
3048 # Create a collected ophandle, then collect it after 23 hours
3049 # and 59 seconds to make sure that it is still there.
3050 d = _make_collected_ophandle(133)
3051 d.addCallback(lambda ign:
3052 self.clock.advance((24*60*60) - 1))
3053 d.addCallback(lambda ign:
3054 self.GET("/operations/133?t=status&output=JSON"))
3056 data = simplejson.loads(res)
3057 self.failUnless("finished" in data, res)
3058 d.addCallback(_check1)
3059 # Create another uncollected ophandle, then try to collect it
3060 # after 24 hours to make sure that it is gone.
3061 d.addCallback(lambda ign:
3062 _make_collected_ophandle(134))
3063 d.addCallback(lambda ign:
3064 self.clock.advance(24*60*60))
3065 d.addCallback(lambda ign:
3066 self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
3067 404, "404 Not Found",
3068 "unknown/expired handle '134'",
3070 "/operations/134?t=status&output=JSON"))
# POST /report_incident must acknowledge the report in its HTML response.
# NOTE(review): excerpt omits the `def _done(res):` header and `return d`.
3073 def test_incident(self):
3074 d = self.POST("/report_incident", details="eek")
3076 self.failUnless("Thank you for your report!" in res, res)
3077 d.addCallback(_done)
# Files placed under the node's static directory must be served at
# /static/<relative path>.
# NOTE(review): excerpt omits the f.write/f.close lines between opening
# the file and issuing the GET, the `def _check(res):` header, and `return d`.
3080 def test_static(self):
3081 webdir = os.path.join(self.staticdir, "subdir")
3082 fileutil.make_dirs(webdir)
3083 f = open(os.path.join(webdir, "hello.txt"), "wb")
3087 d = self.GET("/static/subdir/hello.txt")
3089 self.failUnlessEqual(res, "hello")
3090 d.addCallback(_check)
# Unit tests for the small formatting/parsing helpers in allmydata.web
# (common.parse_replace_arg, the abbreviate_* helpers, status.plural).
# NOTE(review): excerpt omits a few lines (e.g. the `def convert(s):` /
# `def convert2(s):` headers in test_plural and some argument tails).
3094 class Util(unittest.TestCase, ShouldFailMixin):
3095 def test_load_file(self):
3096 # This will raise an exception unless a well-formed XML file is found under that name.
3097 common.getxmlfile('directory.xhtml').load()
3099 def test_parse_replace_arg(self):
3100 self.failUnlessEqual(common.parse_replace_arg("true"), True)
3101 self.failUnlessEqual(common.parse_replace_arg("false"), False)
3102 self.failUnlessEqual(common.parse_replace_arg("only-files"),
3104 self.shouldFail(AssertionError, "test_parse_replace_arg", "",
3105 common.parse_replace_arg, "only_fles")
3107 def test_abbreviate_time(self):
3108 self.failUnlessEqual(common.abbreviate_time(None), "")
3109 self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
3110 self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
3111 self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
3112 self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
3114 def test_abbreviate_rate(self):
3115 self.failUnlessEqual(common.abbreviate_rate(None), "")
3116 self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
3117 self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
3118 self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
3120 def test_abbreviate_size(self):
3121 self.failUnlessEqual(common.abbreviate_size(None), "")
3122 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
3123 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
3124 self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
3125 self.failUnlessEqual(common.abbreviate_size(123), "123B")
3127 def test_plural(self):
3129 return "%d second%s" % (s, status.plural(s))
3130 self.failUnlessEqual(convert(0), "0 seconds")
3131 self.failUnlessEqual(convert(1), "1 second")
3132 self.failUnlessEqual(convert(2), "2 seconds")
3134 return "has share%s: %s" % (status.plural(s), ",".join(s))
3135 self.failUnlessEqual(convert2([]), "has shares: ")
3136 self.failUnlessEqual(convert2(["1"]), "has share: 1")
3137 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
3140 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """POST to the stashed check URL for *which*, appending *args* as the query.

    *ign* is an ignored Deferred-chain value, so this method can be used
    directly as an addCallback target; *clientnum* selects which client's
    webapi the request goes through (default: client 0).
    """
    target = "%s?%s" % (self.fileurls[which], args)
    return self.GET(target, method="POST", clientnum=clientnum)
# End-to-end t=check coverage on a real grid: upload a healthy file, a
# "sick" file (1 share deleted), a "dead" file (9 of 10 shares deleted),
# a corrupted mutable file, a literal file, and an empty immutable dir,
# then verify the HTML and JSON checker output for each.
# NOTE(review): excerpt omits scattered lines (set_up_grid/DATA setup,
# `self.fileurls = {}` init, corrupt_share() call, and the final `return d`).
3147 def test_filecheck(self):
3148 self.basedir = "web/Grid/filecheck"
3150 c0 = self.g.clients[0]
3153 d = c0.upload(upload.Data(DATA, convergence=""))
3154 def _stash_uri(ur, which):
3155 self.uris[which] = ur.uri
3156 d.addCallback(_stash_uri, "good")
3157 d.addCallback(lambda ign:
3158 c0.upload(upload.Data(DATA+"1", convergence="")))
3159 d.addCallback(_stash_uri, "sick")
3160 d.addCallback(lambda ign:
3161 c0.upload(upload.Data(DATA+"2", convergence="")))
3162 d.addCallback(_stash_uri, "dead")
3163 def _stash_mutable_uri(n, which):
3164 self.uris[which] = n.get_uri()
3165 assert isinstance(self.uris[which], str)
3166 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3167 d.addCallback(_stash_mutable_uri, "corrupt")
3168 d.addCallback(lambda ign:
3169 c0.upload(upload.Data("literal", convergence="")))
3170 d.addCallback(_stash_uri, "small")
3171 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3172 d.addCallback(_stash_mutable_uri, "smalldir")
3174 def _compute_fileurls(ignored):
3176 for which in self.uris:
3177 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3178 d.addCallback(_compute_fileurls)
# Damage the fixtures: delete one "sick" share, nine "dead" shares, and
# corrupt one share of the mutable file via the debug CLI.
3180 def _clobber_shares(ignored):
3181 good_shares = self.find_shares(self.uris["good"])
3182 self.failUnlessEqual(len(good_shares), 10)
3183 sick_shares = self.find_shares(self.uris["sick"])
3184 os.unlink(sick_shares[0][2])
3185 dead_shares = self.find_shares(self.uris["dead"])
3186 for i in range(1, 10):
3187 os.unlink(dead_shares[i][2])
3188 c_shares = self.find_shares(self.uris["corrupt"])
3189 cso = CorruptShareOptions()
3190 cso.stdout = StringIO()
3191 cso.parseOptions([c_shares[0][2]])
3193 d.addCallback(_clobber_shares)
# Healthy file: HTML and JSON checker output both report Healthy.
3195 d.addCallback(self.CHECK, "good", "t=check")
3196 def _got_html_good(res):
3197 self.failUnless("Healthy" in res, res)
3198 self.failIf("Not Healthy" in res, res)
3199 d.addCallback(_got_html_good)
3200 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
3201 def _got_html_good_return_to(res):
3202 self.failUnless("Healthy" in res, res)
3203 self.failIf("Not Healthy" in res, res)
3204 self.failUnless('<a href="somewhere">Return to file'
3206 d.addCallback(_got_html_good_return_to)
3207 d.addCallback(self.CHECK, "good", "t=check&output=json")
3208 def _got_json_good(res):
3209 r = simplejson.loads(res)
3210 self.failUnlessEqual(r["summary"], "Healthy")
3211 self.failUnless(r["results"]["healthy"])
3212 self.failIf(r["results"]["needs-rebalancing"])
3213 self.failUnless(r["results"]["recoverable"])
3214 d.addCallback(_got_json_good)
# Literal file and literal (small immutable) dir: always healthy, and
# their JSON reports an empty storage-index.
3216 d.addCallback(self.CHECK, "small", "t=check")
3217 def _got_html_small(res):
3218 self.failUnless("Literal files are always healthy" in res, res)
3219 self.failIf("Not Healthy" in res, res)
3220 d.addCallback(_got_html_small)
3221 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
3222 def _got_html_small_return_to(res):
3223 self.failUnless("Literal files are always healthy" in res, res)
3224 self.failIf("Not Healthy" in res, res)
3225 self.failUnless('<a href="somewhere">Return to file'
3227 d.addCallback(_got_html_small_return_to)
3228 d.addCallback(self.CHECK, "small", "t=check&output=json")
3229 def _got_json_small(res):
3230 r = simplejson.loads(res)
3231 self.failUnlessEqual(r["storage-index"], "")
3232 self.failUnless(r["results"]["healthy"])
3233 d.addCallback(_got_json_small)
3235 d.addCallback(self.CHECK, "smalldir", "t=check")
3236 def _got_html_smalldir(res):
3237 self.failUnless("Literal files are always healthy" in res, res)
3238 self.failIf("Not Healthy" in res, res)
3239 d.addCallback(_got_html_smalldir)
3240 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
3241 def _got_json_smalldir(res):
3242 r = simplejson.loads(res)
3243 self.failUnlessEqual(r["storage-index"], "")
3244 self.failUnless(r["results"]["healthy"])
3245 d.addCallback(_got_json_smalldir)
# Sick file (9/10 shares): unhealthy but still recoverable.
3247 d.addCallback(self.CHECK, "sick", "t=check")
3248 def _got_html_sick(res):
3249 self.failUnless("Not Healthy" in res, res)
3250 d.addCallback(_got_html_sick)
3251 d.addCallback(self.CHECK, "sick", "t=check&output=json")
3252 def _got_json_sick(res):
3253 r = simplejson.loads(res)
3254 self.failUnlessEqual(r["summary"],
3255 "Not Healthy: 9 shares (enc 3-of-10)")
3256 self.failIf(r["results"]["healthy"])
3257 self.failIf(r["results"]["needs-rebalancing"])
3258 self.failUnless(r["results"]["recoverable"])
3259 d.addCallback(_got_json_sick)
# Dead file (1/10 shares, below k=3): unhealthy AND unrecoverable.
3261 d.addCallback(self.CHECK, "dead", "t=check")
3262 def _got_html_dead(res):
3263 self.failUnless("Not Healthy" in res, res)
3264 d.addCallback(_got_html_dead)
3265 d.addCallback(self.CHECK, "dead", "t=check&output=json")
3266 def _got_json_dead(res):
3267 r = simplejson.loads(res)
3268 self.failUnlessEqual(r["summary"],
3269 "Not Healthy: 1 shares (enc 3-of-10)")
3270 self.failIf(r["results"]["healthy"])
3271 self.failIf(r["results"]["needs-rebalancing"])
3272 self.failIf(r["results"]["recoverable"])
3273 d.addCallback(_got_json_dead)
# Corrupted mutable file: only detectable with verify=true; one corrupt
# share is counted, 9 good shares remain.
3275 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
3276 def _got_html_corrupt(res):
3277 self.failUnless("Not Healthy! : Unhealthy" in res, res)
3278 d.addCallback(_got_html_corrupt)
3279 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
3280 def _got_json_corrupt(res):
3281 r = simplejson.loads(res)
3282 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
3284 self.failIf(r["results"]["healthy"])
3285 self.failUnless(r["results"]["recoverable"])
3286 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
3287 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
3288 d.addCallback(_got_json_corrupt)
3290 d.addErrback(self.explain_web_error)
# t=check&repair=true HTML output: healthy files need no repair, a sick
# file is repaired successfully, and a verify-detected corrupt mutable
# file is also repaired. Repair of a dead (unrecoverable) file is left
# commented out pending a decision on how it should be reported.
# NOTE(review): excerpt omits scattered setup lines (set_up_grid, DATA,
# fileurls init, corrupt_share call) and the final `return d`.
3293 def test_repair_html(self):
3294 self.basedir = "web/Grid/repair_html"
3296 c0 = self.g.clients[0]
3299 d = c0.upload(upload.Data(DATA, convergence=""))
3300 def _stash_uri(ur, which):
3301 self.uris[which] = ur.uri
3302 d.addCallback(_stash_uri, "good")
3303 d.addCallback(lambda ign:
3304 c0.upload(upload.Data(DATA+"1", convergence="")))
3305 d.addCallback(_stash_uri, "sick")
3306 d.addCallback(lambda ign:
3307 c0.upload(upload.Data(DATA+"2", convergence="")))
3308 d.addCallback(_stash_uri, "dead")
3309 def _stash_mutable_uri(n, which):
3310 self.uris[which] = n.get_uri()
3311 assert isinstance(self.uris[which], str)
3312 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3313 d.addCallback(_stash_mutable_uri, "corrupt")
3315 def _compute_fileurls(ignored):
3317 for which in self.uris:
3318 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3319 d.addCallback(_compute_fileurls)
3321 def _clobber_shares(ignored):
3322 good_shares = self.find_shares(self.uris["good"])
3323 self.failUnlessEqual(len(good_shares), 10)
3324 sick_shares = self.find_shares(self.uris["sick"])
3325 os.unlink(sick_shares[0][2])
3326 dead_shares = self.find_shares(self.uris["dead"])
3327 for i in range(1, 10):
3328 os.unlink(dead_shares[i][2])
3329 c_shares = self.find_shares(self.uris["corrupt"])
3330 cso = CorruptShareOptions()
3331 cso.stdout = StringIO()
3332 cso.parseOptions([c_shares[0][2]])
3334 d.addCallback(_clobber_shares)
3336 d.addCallback(self.CHECK, "good", "t=check&repair=true")
3337 def _got_html_good(res):
3338 self.failUnless("Healthy" in res, res)
3339 self.failIf("Not Healthy" in res, res)
3340 self.failUnless("No repair necessary" in res, res)
3341 d.addCallback(_got_html_good)
3343 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
3344 def _got_html_sick(res):
3345 self.failUnless("Healthy : healthy" in res, res)
3346 self.failIf("Not Healthy" in res, res)
3347 self.failUnless("Repair successful" in res, res)
3348 d.addCallback(_got_html_sick)
3350 # repair of a dead file will fail, of course, but it isn't yet
3351 # clear how this should be reported. Right now it shows up as
3354 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
3355 #def _got_html_dead(res):
3357 # self.failUnless("Healthy : healthy" in res, res)
3358 # self.failIf("Not Healthy" in res, res)
3359 # self.failUnless("No repair necessary" in res, res)
3360 #d.addCallback(_got_html_dead)
3362 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
3363 def _got_html_corrupt(res):
3364 self.failUnless("Healthy : Healthy" in res, res)
3365 self.failIf("Not Healthy" in res, res)
3366 self.failUnless("Repair successful" in res, res)
3367 d.addCallback(_got_html_corrupt)
3369 d.addErrback(self.explain_web_error)
# t=check&repair=true&output=json on a sick file: the JSON must report
# repair attempted and successful, pre-repair unhealthy, post-repair healthy.
# NOTE(review): excerpt omits set_up_grid/DATA/fileurls-init lines and
# the final `return d`.
3372 def test_repair_json(self):
3373 self.basedir = "web/Grid/repair_json"
3375 c0 = self.g.clients[0]
3378 d = c0.upload(upload.Data(DATA+"1", convergence=""))
3379 def _stash_uri(ur, which):
3380 self.uris[which] = ur.uri
3381 d.addCallback(_stash_uri, "sick")
3383 def _compute_fileurls(ignored):
3385 for which in self.uris:
3386 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3387 d.addCallback(_compute_fileurls)
3389 def _clobber_shares(ignored):
3390 sick_shares = self.find_shares(self.uris["sick"])
3391 os.unlink(sick_shares[0][2])
3392 d.addCallback(_clobber_shares)
3394 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
3395 def _got_json_sick(res):
3396 r = simplejson.loads(res)
3397 self.failUnlessEqual(r["repair-attempted"], True)
3398 self.failUnlessEqual(r["repair-successful"], True)
3399 self.failUnlessEqual(r["pre-repair-results"]["summary"],
3400 "Not Healthy: 9 shares (enc 3-of-10)")
3401 self.failIf(r["pre-repair-results"]["results"]["healthy"])
3402 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
3403 self.failUnless(r["post-repair-results"]["results"]["healthy"])
3404 d.addCallback(_got_json_sick)
3406 d.addErrback(self.explain_web_error)
# A directory containing an UnknownNode (a cap from "the future") must
# still render: HTML listing shows a '?' type and a relative More Info
# link; t=json exposes ro_uri (and rw_uri only via a writeable view);
# t=info renders without the writecap. Exercised for both a mutable
# parent (immutable=False) and an immutable parent (immutable=True).
# NOTE(review): excerpt omits many lines (the if/else branches around
# 3411/3423/3428, set_up_grid, `def` headers of some inner checkers,
# and the final `return d`) — verify control flow against the full file.
3409 def test_unknown(self, immutable=False):
3410 self.basedir = "web/Grid/unknown"
3412 self.basedir = "web/Grid/unknown-immutable"
3415 c0 = self.g.clients[0]
3419 # the future cap format may contain slashes, which must be tolerated
3420 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
3424 name = u"future-imm"
3425 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
3426 d = c0.create_immutable_dirnode({name: (future_node, {})})
3429 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3430 d = c0.create_dirnode()
3432 def _stash_root_and_create_file(n):
3434 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
3435 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
3437 return self.rootnode.set_node(name, future_node)
3438 d.addCallback(_stash_root_and_create_file)
3440 # make sure directory listing tolerates unknown nodes
3441 d.addCallback(lambda ign: self.GET(self.rooturl))
3442 def _check_directory_html(res, expected_type_suffix):
3443 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
3444 '<td>%s</td>' % (expected_type_suffix, str(name)),
3446 self.failUnless(re.search(pattern, res), res)
3447 # find the More Info link for name, should be relative
3448 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3449 info_url = mo.group(1)
3450 self.failUnlessEqual(info_url, "%s?t=info" % (str(name),))
3452 d.addCallback(_check_directory_html, "-IMM")
3454 d.addCallback(_check_directory_html, "")
3456 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3457 def _check_directory_json(res, expect_rw_uri):
3458 data = simplejson.loads(res)
3459 self.failUnlessEqual(data[0], "dirnode")
3460 f = data[1]["children"][name]
3461 self.failUnlessEqual(f[0], "unknown")
3463 self.failUnlessEqual(f[1]["rw_uri"], unknown_rwcap)
3465 self.failIfIn("rw_uri", f[1])
3467 self.failUnlessEqual(f[1]["ro_uri"], unknown_immcap, data)
3469 self.failUnlessEqual(f[1]["ro_uri"], unknown_rocap)
3470 self.failUnless("metadata" in f[1])
3471 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
3473 def _check_info(res, expect_rw_uri, expect_ro_uri):
3474 self.failUnlessIn("Object Type: <span>unknown</span>", res)
3476 self.failUnlessIn(unknown_rwcap, res)
3479 self.failUnlessIn(unknown_immcap, res)
3481 self.failUnlessIn(unknown_rocap, res)
3483 self.failIfIn(unknown_rocap, res)
3484 self.failIfIn("Raw data as", res)
3485 self.failIfIn("Directory writecap", res)
3486 self.failIfIn("Checker Operations", res)
3487 self.failIfIn("Mutable File Operations", res)
3488 self.failIfIn("Directory Operations", res)
3490 # FIXME: these should have expect_rw_uri=not immutable; I don't know
3491 # why they fail. Possibly related to ticket #922.
3493 d.addCallback(lambda ign: self.GET(expected_info_url))
3494 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
3495 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
3496 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
3498 def _check_json(res, expect_rw_uri):
3499 data = simplejson.loads(res)
3500 self.failUnlessEqual(data[0], "unknown")
3502 self.failUnlessEqual(data[1]["rw_uri"], unknown_rwcap)
3504 self.failIfIn("rw_uri", data[1])
3507 self.failUnlessEqual(data[1]["ro_uri"], unknown_immcap)
3508 self.failUnlessEqual(data[1]["mutable"], False)
3510 self.failUnlessEqual(data[1]["ro_uri"], unknown_rocap)
3511 self.failUnlessEqual(data[1]["mutable"], True)
3513 self.failUnlessEqual(data[1]["ro_uri"], unknown_rocap)
3514 self.failIf("mutable" in data[1], data[1])
3516 # TODO: check metadata contents
3517 self.failUnless("metadata" in data[1])
3519 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
3520 d.addCallback(_check_json, expect_rw_uri=not immutable)
3522 # and make sure that a read-only version of the directory can be
3523 # rendered too. This version will not have unknown_rwcap, whether
3524 # or not future_node was immutable.
3525 d.addCallback(lambda ign: self.GET(self.rourl))
3527 d.addCallback(_check_directory_html, "-IMM")
3529 d.addCallback(_check_directory_html, "-RO")
3531 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
3532 d.addCallback(_check_directory_json, expect_rw_uri=False)
3534 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
3535 d.addCallback(_check_json, expect_rw_uri=False)
3537 # TODO: check that getting t=info from the Info link in the ro directory
3538 # works, and does not include the writecap URI.
# Run test_unknown with an immutable parent directory.
3541 def test_immutable_unknown(self):
3542 return self.test_unknown(immutable=True)
3544 def test_mutant_dirnodes_are_omitted(self):
3545 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
3548 c = self.g.clients[0]
3553 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
3554 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
3555 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
3557 # This method tests mainly dirnode, but we'd have to duplicate code in order to
3558 # test the dirnode and web layers separately.
3560 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
3561 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
3562 # When the directory is read, the mutants should be silently disposed of, leaving
3563 # their lonely sibling.
3564 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
3565 # because immutable directories don't have a writecap and therefore that field
3566 # isn't (and can't be) decrypted.
3567 # TODO: The field still exists in the netstring. Technically we should check what
3568 # happens if something is put there (_unpack_contents should raise ValueError),
3569 # but that can wait.
3571 lonely_child = nm.create_from_cap(lonely_uri)
3572 mutant_ro_child = nm.create_from_cap(mut_read_uri)
3573 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
3575 def _by_hook_or_by_crook():
3577 for n in [mutant_ro_child, mutant_write_in_ro_child]:
3578 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
3580 mutant_write_in_ro_child.get_write_uri = lambda: None
3581 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
3583 kids = {u"lonely": (lonely_child, {}),
3584 u"ro": (mutant_ro_child, {}),
3585 u"write-in-ro": (mutant_write_in_ro_child, {}),
3587 d = c.create_immutable_dirnode(kids)
3590 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
3591 self.failIf(dn.is_mutable())
3592 self.failUnless(dn.is_readonly())
3593 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
3594 self.failIf(hasattr(dn._node, 'get_writekey'))
3596 self.failUnless("RO-IMM" in rep)
3598 self.failUnlessIn("CHK", cap.to_string())
3601 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
3602 return download_to_data(dn._node)
3603 d.addCallback(_created)
3605 def _check_data(data):
3606 # Decode the netstring representation of the directory to check that all children
3607 # are present. This is a bit of an abstraction violation, but there's not really
3608 # any other way to do it given that the real DirectoryNode._unpack_contents would
3609 # strip the mutant children out (which is what we're trying to test, later).
3612 while position < len(data):
3613 entries, position = split_netstring(data, 1, position)
3615 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
3616 name = name_utf8.decode("utf-8")
3617 self.failUnless(rwcapdata == "")
3618 self.failUnless(name in kids)
3619 (expected_child, ign) = kids[name]
3620 self.failUnlessEqual(ro_uri, expected_child.get_readonly_uri())
3623 self.failUnlessEqual(numkids, 3)
3624 return self.rootnode.list()
3625 d.addCallback(_check_data)
3627 # Now when we use the real directory listing code, the mutants should be absent.
3628 def _check_kids(children):
3629 self.failUnlessEqual(sorted(children.keys()), [u"lonely"])
3630 lonely_node, lonely_metadata = children[u"lonely"]
3632 self.failUnlessEqual(lonely_node.get_write_uri(), None)
3633 self.failUnlessEqual(lonely_node.get_readonly_uri(), lonely_uri)
3634 d.addCallback(_check_kids)
3636 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
3637 d.addCallback(lambda n: n.list())
3638 d.addCallback(_check_kids) # again with dirnode recreated from cap
3640 # Make sure the lonely child can be listed in HTML...
3641 d.addCallback(lambda ign: self.GET(self.rooturl))
3642 def _check_html(res):
3643 self.failIfIn("URI:SSK", res)
3644 get_lonely = "".join([r'<td>FILE</td>',
3646 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
3648 r'\s+<td>%d</td>' % len("one"),
3650 self.failUnless(re.search(get_lonely, res), res)
3652 # find the More Info link for name, should be relative
3653 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3654 info_url = mo.group(1)
3655 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
3656 d.addCallback(_check_html)
3659 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3660 def _check_json(res):
3661 data = simplejson.loads(res)
3662 self.failUnlessEqual(data[0], "dirnode")
3663 listed_children = data[1]["children"]
3664 self.failUnlessEqual(sorted(listed_children.keys()), [u"lonely"])
3665 ll_type, ll_data = listed_children[u"lonely"]
3666 self.failUnlessEqual(ll_type, "filenode")
3667 self.failIf("rw_uri" in ll_data)
3668 self.failUnlessEqual(ll_data["ro_uri"], lonely_uri)
3669 d.addCallback(_check_json)
    def test_deep_check(self):
        """Exercise t=stream-deep-check and t=stream-manifest over a small
        tree (root, one healthy CHK file, a literal file, a damaged "sick"
        file, and an UnknownNode), then make a subdirectory unrecoverable
        and verify the streaming APIs report the failure via an ERROR: line.

        NOTE(review): this copy of the file is missing lines (grid setup,
        several callback/try headers, closing parens); the surviving code
        is preserved as-is rather than reconstructed.
        """
        self.basedir = "web/Grid/deep_check"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            # remember the root's URL and give it one fully-healthy CHK child
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        # this tests that deep-check and stream-manifest will ignore
        # UnknownNode instances. Hopefully this will also cover deep-stats.
        future_node = UnknownNode(unknown_rwcap, unknown_rocap)
        d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
        def _clobber_shares(ignored):
            # drop two shares of "sick" so it checks as damaged but recoverable
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        # NOTE(review): the "def _done(res):" / "try:" header lines were
        # dropped from this copy; the callback body follows.
            units = [simplejson.loads(line)
                     for line in res.splitlines()
                print "response is:", res
                print "undecodeable line was '%s'" % line
            # root, good, small, sick, future, plus the trailing stats unit
            self.failUnlessEqual(len(units), 5+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
            self.failUnlessEqual(stats["type"], "stats")
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
            self.failUnlessEqual(s["count-unknown"], 1)
        d.addCallback(_done)
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_manifest(res):
            # stream-manifest emits one JSON line per node, then a stats line
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 5+1)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessEqual(stats["count-immutable-files"], 2)
            self.failUnlessEqual(stats["count-literal-files"], 1)
            self.failUnlessEqual(stats["count-mutable-files"], 0)
            self.failUnlessEqual(stats["count-immutable-files"], 2)
            self.failUnlessEqual(stats["count-unknown"], 1)
        d.addCallback(_check_manifest)
        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens
        d.addCallback(lambda ign:
                      self.rootnode.create_subdirectory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+"2",
        d.addCallback(_stash_uri, "grandchild")
        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],
        # root/subdir [unrecoverable]
        # root/subdir/grandchild
        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            # everything before the unrecoverable node is valid JSON; the
            # failure is an ERROR: line followed by a traceback
            lines = res.splitlines()
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            lines = res.splitlines()
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            # one share survives, so subdir is checkable but not recoverable
            self.failUnlessEqual(r["count-recoverable-versions"], 0)
            self.failUnlessEqual(r["count-shares-good"], 1)
            self.failUnlessEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)
        d.addErrback(self.explain_web_error)
    def test_deep_check_and_repair(self):
        """Run t=stream-deep-check&repair=true over a tree containing one
        damaged ("sick") file; the repairer should restore it from 9 to 10
        shares while healthy nodes are not repaired.

        NOTE(review): this copy of the file is missing lines (grid setup,
        the "def _done(res):" header, closing parens); the surviving code
        is preserved as-is rather than reconstructed.
        """
        self.basedir = "web/Grid/deep_check_and_repair"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        #d.addCallback(lambda ign:
        #              self.rootnode.add_file(u"dead",
        #                                     upload.Data(DATA+"2",
        #d.addCallback(_stash_uri, "dead")
        #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
        #d.addCallback(_stash_uri, "corrupt")
        def _clobber_shares(ignored):
            # sanity-check that "good" is fully healthy, then delete one
            # share of "sick" so the repairer has something to fix
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            #dead_shares = self.find_shares(self.uris["dead"])
            #for i in range(1, 10):
            #    os.unlink(dead_shares[i][2])
            #c_shares = self.find_shares(self.uris["corrupt"])
            #cso = CorruptShareOptions()
            #cso.stdout = StringIO()
            #cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)
        # root/good CHK, 10 shares
        # root/sick CHK, 9 shares
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
        # NOTE(review): the "def _done(res):" header line was dropped here
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # root, good, small, sick, plus the trailing stats unit
            self.failUnlessEqual(len(units), 4+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0crr = u0["check-and-repair-results"]
            self.failUnlessEqual(u0crr["repair-attempted"], False)
            self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcrr = ugood["check-and-repair-results"]
            self.failUnlessEqual(ugoodcrr["repair-attempted"], False)
            self.failUnlessEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
            usick = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"sick"]][0]
            self.failUnlessEqual(usick["cap"], self.uris["sick"])
            usickcrr = usick["check-and-repair-results"]
            # the damaged file is repaired back to full health
            self.failUnlessEqual(usickcrr["repair-attempted"], True)
            self.failUnlessEqual(usickcrr["repair-successful"], True)
            self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
            self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
            self.failUnlessEqual(stats["type"], "stats")
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
        d.addCallback(_done)
        d.addErrback(self.explain_web_error)
    def _count_leases(self, ignored, which):
        # Collect (sharefile-path, lease-count) pairs for every share of
        # the stashed URI named `which`; `ignored` is the value passed
        # along the Deferred chain and is not used.
        # NOTE(review): the "lease_counts = []" initializer and the
        # trailing "return lease_counts" appear to have been dropped
        # from this copy of the file.
        u = self.uris[which]
        shares = self.find_shares(u)
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
3952 def _assert_leasecount(self, lease_counts, expected):
3953 for (fn, num_leases) in lease_counts:
3954 if num_leases != expected:
3955 self.fail("expected %d leases, have %d, on %s" %
3956 (expected, num_leases, fn))
    def test_add_lease(self):
        """Verify t=check&add-lease=true semantics: the uploading client
        (same lease secrets) merely renews its existing lease, while an
        alternate client (clientnum=1) adds a second lease.

        NOTE(review): this copy of the file is missing a few lines (the
        DATA definition, dict initializers, and one clientnum
        continuation line); the surviving code is preserved as-is.
        """
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
        d.addCallback(_stash_mutable_uri, "mutable")
        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        # baseline: one lease on each object
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        # a plain check must not change any lease counts
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # NOTE(review): the continuation line carrying the clientnum
        # argument (presumably "clientnum=1)") was dropped from this copy
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)
        d.addErrback(self.explain_web_error)
    def test_deep_add_lease(self):
        """Same add-lease behavior as test_add_lease, but exercised via
        the recursive t=stream-deep-check&add-lease=true operation: the
        original client renews, an alternate client adds a second lease
        to every node in the tree.

        NOTE(review): this copy is missing some lines (DATA/dict
        initializers, the "def _done(res):" header, one continuation
        line); the surviving code is preserved as-is.
        """
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"one", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")
        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
        # NOTE(review): the "def _done(res):" header line was dropped here
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # root, one, small, mutable, stats
            self.failUnlessEqual(len(units), 4+1)
        d.addCallback(_done)
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)
        # same client, same lease secrets: counts stay at 1
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # NOTE(review): the continuation line carrying the clientnum
        # argument (presumably "clientnum=1)") was dropped from this copy
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
        d.addCallback(_done)
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)
        d.addErrback(self.explain_web_error)
    def test_exceptions(self):
        """Exercise the webapi's error rendering: NoSharesError /
        NotEnoughSharesError as 410 text/plain bodies, missing children as
        404, unrecoverable dirnodes as degraded HTML pages or 410 JSON
        errors, and an arbitrary server-side exception (ErrorBoom) as a
        500 whose body format follows the Accept header.

        NOTE(review): this copy is missing some lines (the "def
        _stash_root(n):" and "def _stash_bad(ur):" headers, "u =
        n.get_uri()" initializers, one self.GET continuation); the
        surviving code is preserved as-is.
        """
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        # require 2 happy servers so small grids still place shares
        c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
        d = c0.create_dirnode()
        # NOTE(review): the "def _stash_root(n):" header was dropped here
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
        # NOTE(review): the "def _stash_bad(ur):" header was dropped here
            self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
            # keep only share 0, so the file is damaged but checkable
            self.delete_shares_numbered(ur.uri, range(1,10))
            # flipping a key bit makes a URI for which zero shares exist
            u = uri.from_string(ur.uri)
            u.key = testutil.flip_bit(u.key, 0)
            baduri = u.to_string()
            self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_1share(n):
            # NOTE(review): the "u = n.get_uri()" line was dropped here
            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-1share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(1,10))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_0share(n):
            # NOTE(review): the "u = n.get_uri()" line was dropped here
            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-0share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(0,10))
        d.addCallback(_mangle_dirnode_0share)
        # NotEnoughSharesError should be reported sensibly, with a
        # text/plain explanation of the problem, and perhaps some
        # information on which shares *could* be found.
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET unrecoverable",
                                           410, "Gone", "NoSharesError",
                                           self.GET, self.fileurls["0shares"]))
        def _check_zero_shares(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NoSharesError: no shares could be found. "
                   "Zero shares usually indicates a corrupt URI, or that "
                   "no servers were connected, but it might also indicate "
                   "severe corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is: "
                   "Failed to get enough shareholders: have 0, need 3")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_zero_shares)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET 1share",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["1share"]))
        def _check_one_share(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NotEnoughSharesError: This indicates that some "
                   "servers were unavailable, or that shares have been "
                   "lost to server departure, hard drive failure, or disk "
                   "corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is:"
                   " Failed to get enough shareholders: have 1, need 3")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_one_share)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        def _missing_child(body):
            self.failUnless("No such child: imaginary" in body, body)
        d.addCallback(_missing_child)
        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            self.failUnless("<html>" in body, body)
            # we should see the regular page, but without the child table or
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_0shares_dir_html)
        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.failUnless("<html>" in body, body)
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_1shares_dir_html)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_unrecoverable_file)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        # attach a webapi child that throws a random error, to test how it
        w = c0.getServiceNamed("webish")
        w.root.putChild("ERRORBOOM", ErrorBoom())
        # "Accept: */*" : should get a text/html stack trace
        # "Accept: text/plain" : should get a text/plain stack trace
        # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
        # no Accept header: should get a text/html stack trace
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["*/*"]}))
        def _internal_error_html1(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html1)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["text/plain"]}))
        def _internal_error_text2(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text2)
        CLI_accepts = "text/plain, application/octet-stream"
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": [CLI_accepts]}))
        def _internal_error_text3(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text3)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html4(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html4)
        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            self.flushLoggedErrors(CompletelyUnhandledError)
        d.addBoth(_flush_errors)
# Sentinel exception raised by ErrorBoom so test_exceptions can recognize
# (and flush from the trial log) a deliberately-unhandled server error.
# NOTE(review): the class body (presumably just "pass") appears to have
# been truncated from this copy of the file.
class CompletelyUnhandledError(Exception):
class ErrorBoom(rend.Page):
    # Minimal nevow resource whose rendering always fails: beforeRender
    # raises CompletelyUnhandledError, letting test_exceptions observe
    # how the webapi turns an unexpected exception into a 500 response.
    def beforeRender(self, ctx):
        raise CompletelyUnhandledError("whoops")