1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.internet.task import Clock
8 from twisted.web import client, error, http
9 from twisted.python import failure, log
10 from nevow import rend
11 from allmydata import interfaces, uri, webish, dirnode
12 from allmydata.storage.shares import get_share_file
13 from allmydata.storage_client import StorageFarmBroker
14 from allmydata.immutable import upload, download
15 from allmydata.dirnode import DirectoryNode
16 from allmydata.nodemaker import NodeMaker
17 from allmydata.unknown import UnknownNode
18 from allmydata.web import status, common
19 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
20 from allmydata.util import fileutil, base32
21 from allmydata.util.consumer import download_to_data
22 from allmydata.util.netstring import split_netstring
23 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
24 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
25 from allmydata.interfaces import IMutableFileNode
26 from allmydata.mutable import servermap, publish, retrieve
27 import allmydata.test.common_util as testutil
28 from allmydata.test.no_network import GridTestMixin
29 from allmydata.test.common_web import HTTPClientGETFactory, \
31 from allmydata.client import Client, SecretHolder
# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

# trial per-test timeout override (seconds)
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.

# caps in formats from a hypothetical future version, used to exercise
# the webapi's handling of unknown/unrecognized cap types
unknown_rwcap = "lafs://from_the_future"
unknown_rocap = "ro.lafs://readonly_from_the_future"
unknown_immcap = "imm.lafs://immutable_from_the_future"
class FakeStatsProvider:
    # Minimal stand-in for the node's stats provider.
    # NOTE(review): at least one line is missing from this span (likely a
    # "def get_stats(self):" header) -- annotated as found, code unchanged.
    stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """A NodeMaker that hands back fake in-memory filenodes instead of
    talking to real storage."""
    def _create_lit(self, cap):
        # LIT caps are backed by the same fake immutable node type
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # keysize is accepted for interface compatibility but unused here
        return FakeMutableFileNode(None, None, None, None).create(contents)
class FakeUploader(service.Service):
    # Stub uploader: stores uploaded data in a fake CHK filenode instead of
    # pushing shares to a grid.
    # NOTE(review): several lines are missing from this span (e.g. the
    # "def _got_data(...):" callback header and the return statements);
    # the visible statements are annotated as found, code unchanged.
    def upload(self, uploadable, history=None):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
            # (body of the missing _got_data callback)
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    def get_helper_info(self):
    # Interior of a history stand-in class whose "class" header is not
    # visible in this span. Each list holds one pre-made status object so
    # the /status pages have something to render.
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]

    # accessors return the shared class-level lists, not copies
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        # NOTE(review): body missing from this view
class FakeClient(Client):
    # Stripped-down Client wired up with the fake services above.
    # NOTE(review): several lines are missing from this span, including the
    # "def __init__(self):" header and the closing arguments of the
    # FakeNodeMaker(...) call; annotated as found, code unchanged.
        # don't upcall to Client.__init__, since we only want to initialize a
        service.MultiService.__init__(self)
        self.nodeid = "fake_nodeid"
        self.nickname = "fake_nickname"
        self.introducer_furl = "None"
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = SecretHolder("lease secret", "convergence secret")
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(None, permute_peers=True)
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.setServiceParent(self)
        self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
                                       self.uploader, None, None,
    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
# module-level alias for the size cap enforced by the fake mutable nodes
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
class WebMixin(object):
    # Test fixture: builds a FakeClient, starts a webish server on an
    # ephemeral port, and populates a small directory tree:
    #   public/foo/{bar.txt, blockingfile, empty, sub/baz.txt, n?.txt}
    #   public/reedownlee/nor (read-only)
    # NOTE(review): many lines are missing from this span, including the
    # "def setUp(self):" header and the callback "def" headers; the
    # statements below that reference "res", "foo" and "rodir" belong to
    # callbacks whose headers are not visible. Annotated as found.
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
        self.ws.setServiceParent(self.s)
        self.webish_port = port = self.ws.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port

        # six dirnodes: public root, private root, foo, empty, sub, rodir
        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        self.public_root = res[0][1]
        assert interfaces.IDirectoryNode.providedBy(self.public_root), res
        self.public_url = "/uri/" + self.public_root.get_uri()
        self.private_root = res[1][1]
        self._foo_uri = foo.get_uri()
        self._foo_readonly_uri = foo.get_readonly_uri()
        self._foo_verifycap = foo.get_verify_cap().to_string()
        # NOTE: we ignore the deferred on all set_uri() calls, because we
        # know the fake nodes do these synchronously
        self.public_root.set_uri(u"foo", foo.get_uri(),
                                 foo.get_readonly_uri())
        self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
        foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
        self._bar_txt_verifycap = n.get_verify_cap().to_string()
        foo.set_uri(u"empty", res[3][1].get_uri(),
                    res[3][1].get_readonly_uri())
        sub_uri = res[4][1].get_uri()
        self._sub_uri = sub_uri
        foo.set_uri(u"sub", sub_uri, sub_uri)
        sub = self.s.create_node_from_uri(sub_uri)
        _ign, n, blocking_uri = self.makefile(1)
        foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
        unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
        # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
        # still think of it as an umlaut
        foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
        _ign, n, baz_file = self.makefile(2)
        self._baz_file_uri = baz_file
        sub.set_uri(u"baz.txt", baz_file, baz_file)
        _ign, n, self._bad_file_uri = self.makefile(3)
        # this uri should not be downloadable
        del FakeCHKFileNode.all_contents[self._bad_file_uri]
        self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                                 rodir.get_readonly_uri())
        rodir.set_uri(u"nor", baz_file, baz_file)
        # public/foo/blockingfile
        # public/foo/sub/baz.txt
        # public/reedownlee/nor
        self.NEWFILE_CONTENTS = "newfile contents\n"
        return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
202 def makefile(self, number):
203 contents = "contents of file %s\n" % number
204 n = create_chk_filenode(contents)
205 return contents, n, n.get_uri()
        # (body of tearDown; its "def" header is not visible in this span)
        return self.s.stopService()
210 def failUnlessIsBarDotTxt(self, res):
211 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
213 def failUnlessIsBarJSON(self, res):
214 data = simplejson.loads(res)
215 self.failUnless(isinstance(data, list))
216 self.failUnlessEqual(data[0], u"filenode")
217 self.failUnless(isinstance(data[1], dict))
218 self.failIf(data[1]["mutable"])
219 self.failIf("rw_uri" in data[1]) # immutable
220 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
221 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
222 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # Assert that `res` is the t=json representation of the foo dirnode.
        # NOTE(review): a few lines are missing from this span, e.g. the
        # "for (name, value)" line of the kids dict-builder and the final
        # expected value of the last comparison; annotated as found.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)

        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
    def GET(self, urlpath, followRedirect=False, return_response=False,
        # NOTE(review): the continuation of this signature (presumably
        # "**kwargs):") and the deferred/callback setup lines are missing
        # from this span; annotated as found, code unchanged.
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            # (body of the missing _got_data callback)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        # NOTE(review): the deferred setup and the "_got_data" callback
        # header are missing from this span; annotated as found.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            # (body of the missing _got_data callback)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
284 def PUT(self, urlpath, data, **kwargs):
285 url = self.webish_url + urlpath
286 return client.getPage(url, method="PUT", postdata=data, **kwargs)
288 def DELETE(self, urlpath):
289 url = self.webish_url + urlpath
290 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Build a multipart/form-data body from `fields` (a value may be a
        # (filename, contents) tuple for file uploads) and POST it via POST2.
        # NOTE(review): many lines are missing from this span (the form-list
        # initialization, the "else:" branches, the headers dict setup);
        # annotated as found, code unchanged.
        sepbase = "boogabooga"
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
                form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            assert isinstance(value, str)
        body = "\r\n".join(form) + "\r\n"
        headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
        return self.POST2(urlpath, body, headers, followRedirect)
324 def POST2(self, urlpath, body="", headers={}, followRedirect=False):
325 url = self.webish_url + urlpath
326 return client.getPage(url, method="POST", postdata=body,
327 headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # Errback-style checker: assert `res` is a Failure wrapping
        # `expected_failure`, optionally containing the given substrings.
        # NOTE(review): the "if substring:" guard and the "else:" before the
        # final self.fail() are missing from this span; annotated as found.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            self.failUnless(substring in str(res),
                            "substring '%s' not in '%s'"
                            % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    def shouldFail2(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
        # Like shouldFail, but invokes `callable` itself via maybeDeferred.
        # NOTE(review): several lines are missing from this span (a
        # signature continuation, presumably "response_substring=None,", the
        # "_done" callback header, guards and else branches); annotated as
        # found, code unchanged.
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            self.failUnless(substring in str(res),
                            "%s: substring '%s' not in '%s'"
                            % (which, substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "%s: response substring '%s' not in '%s'"
                                % (which,
                                   response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    def should404(self, res, which):
        # Assert that `res` is a Failure carrying an HTTP 404 web error.
        # NOTE(review): the "else:" line and the closing arguments of the
        # final self.fail() call are missing from this span.
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
            self.fail("%s was supposed to Error(404), not get '%s'" %
    def should302(self, res, which):
        # Assert that `res` is a Failure carrying an HTTP 302 redirect error.
        # NOTE(review): the "else:" line and the closing arguments of the
        # final self.fail() call are missing from this span.
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "302")
            self.fail("%s was supposed to Error(302), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    # HTTP-level tests run against the WebMixin fixture.
    # NOTE(review): lines are missing throughout this class's methods
    # (callback "def" headers, "return d" lines, etc.); the visible
    # statements are annotated as found, code unchanged.
    def test_create(self):

    def test_welcome(self):
            # (inside a _check callback whose header is not visible)
            self.failUnless('Welcome To Tahoe-LAFS' in res, res)
            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
    def test_provisioning(self):
        # Exercise the /provisioning page with several parameter sets and
        # check the rendered share-space figures.
        # NOTE(review): the "_check*" callback headers and several dict
        # entries are missing from this span; annotated as found.
        d = self.GET("/provisioning/")
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)
            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
    def test_reliability_tool(self):
        # The /reliability page needs NumPy; skip when it is unavailable.
        # NOTE(review): the try/except around the import, the callback
        # headers, and several form fields are missing from this span;
        # annotated as found, code unchanged.
        from allmydata import reliability
        _hush_pyflakes = reliability
        raise unittest.SkipTest("reliability tool requires NumPy")
        d = self.GET("/reliability/")
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      "check_period": "1M",
                      "report_period": "3M",
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
    def test_status(self):
        # Walk the /status pages for each kind of operation recorded in the
        # fake history's status lists.
        # NOTE(review): a few "def _check...(res):" callback headers and the
        # final "return d" are missing from this span; annotated as found.
        h = self.s.get_history()
        dl_num = h.list_all_download_statuses()[0].get_counter()
        ul_num = h.list_all_upload_statuses()[0].get_counter()
        mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = h.list_all_publish_statuses()[0].get_counter()
        ret_num = h.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            #active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)

        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
531 def test_status_numbers(self):
532 drrm = status.DownloadResultsRendererMixin()
533 self.failUnlessEqual(drrm.render_time(None, None), "")
534 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
535 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
536 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
537 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
538 self.failUnlessEqual(drrm.render_rate(None, None), "")
539 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
540 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
541 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
543 urrm = status.UploadResultsRendererMixin()
544 self.failUnlessEqual(urrm.render_time(None, None), "")
545 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
546 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
547 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
548 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
549 self.failUnlessEqual(urrm.render_rate(None, None), "")
550 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
551 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
552 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        # Plain GET of a file; the trailing "return d" is missing from
        # this span (NOTE(review): annotated as found).
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_range(self):
        # Request a byte range; expect 206 plus a content-range header.
        # NOTE(review): the d.addCallback(_got)/return d lines are missing.
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
    # NOTE(review): in each method below, the trailing
    # d.addCallback(...)/return d lines (and one signature continuation)
    # are missing from this span; visible statements annotated as found.
    def test_GET_FILEURL_partial_range(self):
        # open-ended range "bytes=5-": expect the tail of the file
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])

    def test_HEAD_FILEURL_range(self):
        # HEAD with a range: empty body, 206, content-range header
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))

    def test_GET_FILEURL_range_bad(self):
        # a malformed Range header should produce an error
        # NOTE(review): one argument line of shouldFail2 is missing here
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",

    def test_HEAD_FILEURL(self):
        # plain HEAD: empty body, correct length and content-type headers
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
    def test_GET_FILEURL_named(self):
        # Exercise the /file and /named cap-plus-filename URL forms,
        # including ?save=true and a Unicode filename.
        # NOTE(review): the trailing "return d" is missing from this span.
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    # NOTE(review): in each method below, some shouldFail2 argument lines
    # (typically the expected-status string) and the trailing "return d"
    # are missing from this span; visible statements annotated as found.
    def test_PUT_FILEURL_named_bad(self):
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")

    def test_GET_DIRURL_named_bad(self):
        # a dircap under /file is not a file-cap
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             self.GET, base + "/@@name=/blah.txt")

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             "/file must be followed by a file-cap and a name",

    def test_GET_unhandled_URI_named(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "400 Bad Request", "is not a file-cap",

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             "GET unknown URI type: can only do t=info",

    def test_GET_FILE_URI(self):
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
    # TODO: version of this with a Unicode filename
    # NOTE(review): in the methods below, trailing d.addCallback/return d
    # lines and a few interior lines are missing from this span; visible
    # statements annotated as found.
    def test_GET_FILEURL_save(self):
        # ?save=true should set a content-disposition attachment header
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
                     return_response=True)
        def _got((res, statuscode, headers)):
            content_disposition = headers["content-disposition"][0]
            self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
            self.failUnlessIsBarDotTxt(res)

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")

    def test_PUT_overwrite_only_files(self):
        # create a directory, put a file in that directory.
        contents, n, filecap = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/dir/file1.txt",
                               self.NEWFILE_CONTENTS))
        # try to overwrite the file with replace=only-files
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                       "There was already a child by that name, and you asked me "
                                       self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
    # NOTE(review): in each method below, trailing "return d" lines and a
    # few interior lines (a callback header, a continuation argument) are
    # missing from this span; visible statements annotated as found.
    def test_PUT_NEWFILEURL(self):
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_not_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_range_bad(self):
        # Content-Range on PUT is not supported and must not create a child
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))

    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
            # (inside a _check_uri callback whose header is not visible)
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        # SDMF has a one-segment size limit; exceeding it must 413
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))
    # NOTE(review): in each method below, trailing "return d" lines and a
    # few argument lines are missing from this span; visible statements
    # annotated as found.
    def test_PUT_NEWFILEURL_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",

    def test_PUT_NEWFILEURL_no_replace(self):
        # replace=false on an existing child must be refused
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  "There was already a child by that name, and you asked me "

    def test_PUT_NEWFILEURL_mkdirs(self):
        # intermediate directories are created on demand
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_blocked(self):
        # a file in the path blocks creation of a directory with that name
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  "Unable to create directory 'blockingfile': a file was in the way")

    def test_PUT_NEWFILEURL_emptyname(self):
        # an empty pathname component (i.e. a double-slash) is disallowed
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
                             "The webapi does not allow empty pathname components",
                             self.PUT, self.public_url + "/foo//new.txt", "")

    def test_DELETE_FILEURL(self):
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
884 def failUnlessHasBarDotTxtMetadata(self, res):
885 data = simplejson.loads(res)
886 self.failUnless(isinstance(data, list))
887 self.failUnless(data[1].has_key("metadata"))
888 self.failUnless(data[1]["metadata"].has_key("ctime"))
889 self.failUnless(data[1]["metadata"].has_key("mtime"))
890 self.failUnlessEqual(data[1]["metadata"]["ctime"],
891 self._bar_txt_metadata["ctime"])
    # NOTE(review): in each method below, callback "def" headers and
    # trailing "return d" lines are missing from this span; visible
    # statements annotated as found.
    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
            self.failUnlessIsBarJSON(data)
            self.failUnlessHasBarDotTxtMetadata(data)
        d.addCallback(_check1)

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")

    def test_GET_FILEURL_uri(self):
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 self.public_url + "/foo/bar.txt?t=bogus")

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
    def test_GET_DIRURL(self):
        # Fetch the HTML directory listing for foo and inspect its links,
        # then look at a read-only directory and an empty one.
        # NOTE(review): numerous lines are missing from this span (the ROOT
        # definition, callback "def" headers, else branches), and the
        # method is truncated at the end of this view; visible statements
        # annotated as found, code unchanged.
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        d = self.GET(self.public_url + "/foo", followRedirect=True)
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                self.fail("unable to find delete-bar.txt line", res)

            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td>DIR</td>')
                       +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)

        # look at a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
            self.failUnless("(read-only)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)

        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
            self.failUnless(re.search('<td>DIR-RO</td>'
                                      r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)

        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
            self.failUnless("directory is empty" in res, res)
            MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
            self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)

        # and at a literal directory
1002 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1003 d.addCallback(lambda res:
1004 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1006 self.failUnless('(immutable)' in res, res)
1007 self.failUnless(re.search('<td>FILE</td>'
1008 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1009 d.addCallback(_check5)
1012 def test_GET_DIRURL_badtype(self):
1013 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1017 self.public_url + "/foo?t=bogus")
1020 def test_GET_DIRURL_json(self):
1021 d = self.GET(self.public_url + "/foo?t=json")
1022 d.addCallback(self.failUnlessIsFooJSON)
1026 def test_POST_DIRURL_manifest_no_ophandle(self):
1027 d = self.shouldFail2(error.Error,
1028 "test_POST_DIRURL_manifest_no_ophandle",
1030 "slow operation requires ophandle=",
1031 self.POST, self.public_url, t="start-manifest")
1034 def test_POST_DIRURL_manifest(self):
1035 d = defer.succeed(None)
1036 def getman(ignored, output):
1037 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1038 followRedirect=True)
1039 d.addCallback(self.wait_for_operation, "125")
1040 d.addCallback(self.get_operation_results, "125", output)
1042 d.addCallback(getman, None)
1043 def _got_html(manifest):
1044 self.failUnless("Manifest of SI=" in manifest)
1045 self.failUnless("<td>sub</td>" in manifest)
1046 self.failUnless(self._sub_uri in manifest)
1047 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1048 d.addCallback(_got_html)
1050 # both t=status and unadorned GET should be identical
1051 d.addCallback(lambda res: self.GET("/operations/125"))
1052 d.addCallback(_got_html)
1054 d.addCallback(getman, "html")
1055 d.addCallback(_got_html)
1056 d.addCallback(getman, "text")
1057 def _got_text(manifest):
1058 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1059 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1060 d.addCallback(_got_text)
1061 d.addCallback(getman, "JSON")
1063 data = res["manifest"]
1065 for (path_list, cap) in data:
1066 got[tuple(path_list)] = cap
1067 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1068 self.failUnless((u"sub",u"baz.txt") in got)
1069 self.failUnless("finished" in res)
1070 self.failUnless("origin" in res)
1071 self.failUnless("storage-index" in res)
1072 self.failUnless("verifycaps" in res)
1073 self.failUnless("stats" in res)
1074 d.addCallback(_got_json)
1077 def test_POST_DIRURL_deepsize_no_ophandle(self):
1078 d = self.shouldFail2(error.Error,
1079 "test_POST_DIRURL_deepsize_no_ophandle",
1081 "slow operation requires ophandle=",
1082 self.POST, self.public_url, t="start-deep-size")
1085 def test_POST_DIRURL_deepsize(self):
1086 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1087 followRedirect=True)
1088 d.addCallback(self.wait_for_operation, "126")
1089 d.addCallback(self.get_operation_results, "126", "json")
1090 def _got_json(data):
1091 self.failUnlessEqual(data["finished"], True)
1093 self.failUnless(size > 1000)
1094 d.addCallback(_got_json)
1095 d.addCallback(self.get_operation_results, "126", "text")
1097 mo = re.search(r'^size: (\d+)$', res, re.M)
1098 self.failUnless(mo, res)
1099 size = int(mo.group(1))
1100 # with directories, the size varies.
1101 self.failUnless(size > 1000)
1102 d.addCallback(_got_text)
1105 def test_POST_DIRURL_deepstats_no_ophandle(self):
1106 d = self.shouldFail2(error.Error,
1107 "test_POST_DIRURL_deepstats_no_ophandle",
1109 "slow operation requires ophandle=",
1110 self.POST, self.public_url, t="start-deep-stats")
1113 def test_POST_DIRURL_deepstats(self):
1114 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1115 followRedirect=True)
1116 d.addCallback(self.wait_for_operation, "127")
1117 d.addCallback(self.get_operation_results, "127", "json")
1118 def _got_json(stats):
1119 expected = {"count-immutable-files": 3,
1120 "count-mutable-files": 0,
1121 "count-literal-files": 0,
1123 "count-directories": 3,
1124 "size-immutable-files": 57,
1125 "size-literal-files": 0,
1126 #"size-directories": 1912, # varies
1127 #"largest-directory": 1590,
1128 "largest-directory-children": 5,
1129 "largest-immutable-file": 19,
1131 for k,v in expected.iteritems():
1132 self.failUnlessEqual(stats[k], v,
1133 "stats[%s] was %s, not %s" %
1135 self.failUnlessEqual(stats["size-files-histogram"],
1137 d.addCallback(_got_json)
1140 def test_POST_DIRURL_stream_manifest(self):
1141 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1143 self.failUnless(res.endswith("\n"))
1144 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1145 self.failUnlessEqual(len(units), 7)
1146 self.failUnlessEqual(units[-1]["type"], "stats")
1148 self.failUnlessEqual(first["path"], [])
1149 self.failUnlessEqual(first["cap"], self._foo_uri)
1150 self.failUnlessEqual(first["type"], "directory")
1151 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1152 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1153 self.failIfEqual(baz["storage-index"], None)
1154 self.failIfEqual(baz["verifycap"], None)
1155 self.failIfEqual(baz["repaircap"], None)
1157 d.addCallback(_check)
1160 def test_GET_DIRURL_uri(self):
1161 d = self.GET(self.public_url + "/foo?t=uri")
1163 self.failUnlessEqual(res, self._foo_uri)
1164 d.addCallback(_check)
1167 def test_GET_DIRURL_readonly_uri(self):
1168 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1170 self.failUnlessEqual(res, self._foo_readonly_uri)
1171 d.addCallback(_check)
1174 def test_PUT_NEWDIRURL(self):
1175 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1176 d.addCallback(lambda res:
1177 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1178 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1179 d.addCallback(self.failUnlessNodeKeysAre, [])
1182 def test_POST_NEWDIRURL(self):
1183 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1184 d.addCallback(lambda res:
1185 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1186 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1187 d.addCallback(self.failUnlessNodeKeysAre, [])
1190 def test_POST_NEWDIRURL_emptyname(self):
1191 # an empty pathname component (i.e. a double-slash) is disallowed
1192 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1194 "The webapi does not allow empty pathname components, i.e. a double slash",
1195 self.POST, self.public_url + "//?t=mkdir")
1198 def test_POST_NEWDIRURL_initial_children(self):
1199 (newkids, caps) = self._create_initial_children()
1200 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1201 simplejson.dumps(newkids))
1203 n = self.s.create_node_from_uri(uri.strip())
1204 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1205 d2.addCallback(lambda ign:
1206 self.failUnlessROChildURIIs(n, u"child-imm",
1208 d2.addCallback(lambda ign:
1209 self.failUnlessRWChildURIIs(n, u"child-mutable",
1211 d2.addCallback(lambda ign:
1212 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1214 d2.addCallback(lambda ign:
1215 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1216 caps['unknown_rocap']))
1217 d2.addCallback(lambda ign:
1218 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1219 caps['unknown_rwcap']))
1220 d2.addCallback(lambda ign:
1221 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1222 caps['unknown_immcap']))
1223 d2.addCallback(lambda ign:
1224 self.failUnlessRWChildURIIs(n, u"dirchild",
1226 d2.addCallback(lambda ign:
1227 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1229 d2.addCallback(lambda ign:
1230 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1231 caps['emptydircap']))
1233 d.addCallback(_check)
1234 d.addCallback(lambda res:
1235 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1236 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1237 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1238 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1239 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1242 def test_POST_NEWDIRURL_immutable(self):
1243 (newkids, caps) = self._create_immutable_children()
1244 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1245 simplejson.dumps(newkids))
1247 n = self.s.create_node_from_uri(uri.strip())
1248 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1249 d2.addCallback(lambda ign:
1250 self.failUnlessROChildURIIs(n, u"child-imm",
1252 d2.addCallback(lambda ign:
1253 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1254 caps['unknown_immcap']))
1255 d2.addCallback(lambda ign:
1256 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1258 d2.addCallback(lambda ign:
1259 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1261 d2.addCallback(lambda ign:
1262 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1263 caps['emptydircap']))
1265 d.addCallback(_check)
1266 d.addCallback(lambda res:
1267 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1268 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1269 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1270 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1271 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1272 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1273 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1274 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1275 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1276 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1277 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1278 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1279 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1280 d.addErrback(self.explain_web_error)
1283 def test_POST_NEWDIRURL_immutable_bad(self):
1284 (newkids, caps) = self._create_initial_children()
1285 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1287 "needed to be immutable but was not",
1289 self.public_url + "/foo/newdir?t=mkdir-immutable",
1290 simplejson.dumps(newkids))
1293 def test_PUT_NEWDIRURL_exists(self):
1294 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1295 d.addCallback(lambda res:
1296 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1297 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1298 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1301 def test_PUT_NEWDIRURL_blocked(self):
1302 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1303 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1305 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1306 d.addCallback(lambda res:
1307 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1308 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1309 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1312 def test_PUT_NEWDIRURL_mkdir_p(self):
1313 d = defer.succeed(None)
1314 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1315 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1316 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1317 def mkdir_p(mkpnode):
1318 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1320 def made_subsub(ssuri):
1321 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1322 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1324 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1326 d.addCallback(made_subsub)
1328 d.addCallback(mkdir_p)
1331 def test_PUT_NEWDIRURL_mkdirs(self):
1332 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1333 d.addCallback(lambda res:
1334 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1335 d.addCallback(lambda res:
1336 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1337 d.addCallback(lambda res:
1338 self._foo_node.get_child_at_path(u"subdir/newdir"))
1339 d.addCallback(self.failUnlessNodeKeysAre, [])
1342 def test_DELETE_DIRURL(self):
1343 d = self.DELETE(self.public_url + "/foo")
1344 d.addCallback(lambda res:
1345 self.failIfNodeHasChild(self.public_root, u"foo"))
1348 def test_DELETE_DIRURL_missing(self):
1349 d = self.DELETE(self.public_url + "/foo/missing")
1350 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1351 d.addCallback(lambda res:
1352 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1355 def test_DELETE_DIRURL_missing2(self):
1356 d = self.DELETE(self.public_url + "/missing")
1357 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
1360 def dump_root(self):
1362 w = webish.DirnodeWalkerMixin()
1363 def visitor(childpath, childnode, metadata):
1365 d = w.walk(self.public_root, visitor)
1368 def failUnlessNodeKeysAre(self, node, expected_keys):
1369 for k in expected_keys:
1370 assert isinstance(k, unicode)
1372 def _check(children):
1373 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1374 d.addCallback(_check)
1376 def failUnlessNodeHasChild(self, node, name):
1377 assert isinstance(name, unicode)
1379 def _check(children):
1380 self.failUnless(name in children)
1381 d.addCallback(_check)
1383 def failIfNodeHasChild(self, node, name):
1384 assert isinstance(name, unicode)
1386 def _check(children):
1387 self.failIf(name in children)
1388 d.addCallback(_check)
1391 def failUnlessChildContentsAre(self, node, name, expected_contents):
1392 assert isinstance(name, unicode)
1393 d = node.get_child_at_path(name)
1394 d.addCallback(lambda node: download_to_data(node))
1395 def _check(contents):
1396 self.failUnlessEqual(contents, expected_contents)
1397 d.addCallback(_check)
1400 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1401 assert isinstance(name, unicode)
1402 d = node.get_child_at_path(name)
1403 d.addCallback(lambda node: node.download_best_version())
1404 def _check(contents):
1405 self.failUnlessEqual(contents, expected_contents)
1406 d.addCallback(_check)
1409 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1410 assert isinstance(name, unicode)
1411 d = node.get_child_at_path(name)
1413 self.failUnless(child.is_unknown() or not child.is_readonly())
1414 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1415 self.failUnlessEqual(child.get_write_uri(), expected_uri.strip())
1416 expected_ro_uri = self._make_readonly(expected_uri)
1418 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1419 d.addCallback(_check)
1422 def failUnlessROChildURIIs(self, node, name, expected_uri):
1423 assert isinstance(name, unicode)
1424 d = node.get_child_at_path(name)
1426 self.failUnless(child.is_unknown() or child.is_readonly())
1427 self.failUnlessEqual(child.get_write_uri(), None)
1428 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1429 self.failUnlessEqual(child.get_readonly_uri(), expected_uri.strip())
1430 d.addCallback(_check)
1433 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1434 assert isinstance(name, unicode)
1435 d = node.get_child_at_path(name)
1437 self.failUnless(child.is_unknown() or not child.is_readonly())
1438 self.failUnlessEqual(child.get_uri(), got_uri.strip())
1439 self.failUnlessEqual(child.get_write_uri(), got_uri.strip())
1440 expected_ro_uri = self._make_readonly(got_uri)
1442 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1443 d.addCallback(_check)
1446 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1447 assert isinstance(name, unicode)
1448 d = node.get_child_at_path(name)
1450 self.failUnless(child.is_unknown() or child.is_readonly())
1451 self.failUnlessEqual(child.get_write_uri(), None)
1452 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1453 self.failUnlessEqual(got_uri.strip(), child.get_readonly_uri())
1454 d.addCallback(_check)
1457 def failUnlessCHKURIHasContents(self, got_uri, contents):
1458 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
1460 def test_POST_upload(self):
1461 d = self.POST(self.public_url + "/foo", t="upload",
1462 file=("new.txt", self.NEWFILE_CONTENTS))
1464 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1465 d.addCallback(lambda res:
1466 self.failUnlessChildContentsAre(fn, u"new.txt",
1467 self.NEWFILE_CONTENTS))
1470 def test_POST_upload_unicode(self):
1471 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1472 d = self.POST(self.public_url + "/foo", t="upload",
1473 file=(filename, self.NEWFILE_CONTENTS))
1475 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1476 d.addCallback(lambda res:
1477 self.failUnlessChildContentsAre(fn, filename,
1478 self.NEWFILE_CONTENTS))
1479 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1480 d.addCallback(lambda res: self.GET(target_url))
1481 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1482 self.NEWFILE_CONTENTS,
1486 def test_POST_upload_unicode_named(self):
1487 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1488 d = self.POST(self.public_url + "/foo", t="upload",
1490 file=("overridden", self.NEWFILE_CONTENTS))
1492 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1493 d.addCallback(lambda res:
1494 self.failUnlessChildContentsAre(fn, filename,
1495 self.NEWFILE_CONTENTS))
1496 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1497 d.addCallback(lambda res: self.GET(target_url))
1498 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1499 self.NEWFILE_CONTENTS,
1503 def test_POST_upload_no_link(self):
1504 d = self.POST("/uri", t="upload",
1505 file=("new.txt", self.NEWFILE_CONTENTS))
1506 def _check_upload_results(page):
1507 # this should be a page which describes the results of the upload
1508 # that just finished.
1509 self.failUnless("Upload Results:" in page)
1510 self.failUnless("URI:" in page)
1511 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1512 mo = uri_re.search(page)
1513 self.failUnless(mo, page)
1514 new_uri = mo.group(1)
1516 d.addCallback(_check_upload_results)
1517 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1520 def test_POST_upload_no_link_whendone(self):
1521 d = self.POST("/uri", t="upload", when_done="/",
1522 file=("new.txt", self.NEWFILE_CONTENTS))
1523 d.addBoth(self.shouldRedirect, "/")
1526 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1527 d = defer.maybeDeferred(callable, *args, **kwargs)
1529 if isinstance(res, failure.Failure):
1530 res.trap(error.PageRedirect)
1531 statuscode = res.value.status
1532 target = res.value.location
1533 return checker(statuscode, target)
1534 self.fail("%s: callable was supposed to redirect, not return '%s'"
1539 def test_POST_upload_no_link_whendone_results(self):
1540 def check(statuscode, target):
1541 self.failUnlessEqual(statuscode, str(http.FOUND))
1542 self.failUnless(target.startswith(self.webish_url), target)
1543 return client.getPage(target, method="GET")
1544 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1546 self.POST, "/uri", t="upload",
1547 when_done="/uri/%(uri)s",
1548 file=("new.txt", self.NEWFILE_CONTENTS))
1549 d.addCallback(lambda res:
1550 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
1553 def test_POST_upload_no_link_mutable(self):
1554 d = self.POST("/uri", t="upload", mutable="true",
1555 file=("new.txt", self.NEWFILE_CONTENTS))
1556 def _check(filecap):
1557 filecap = filecap.strip()
1558 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1559 self.filecap = filecap
1560 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1561 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1562 n = self.s.create_node_from_uri(filecap)
1563 return n.download_best_version()
1564 d.addCallback(_check)
1566 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1567 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1568 d.addCallback(_check2)
1570 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1571 return self.GET("/file/%s" % urllib.quote(self.filecap))
1572 d.addCallback(_check3)
1574 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1575 d.addCallback(_check4)
1578 def test_POST_upload_no_link_mutable_toobig(self):
1579 d = self.shouldFail2(error.Error,
1580 "test_POST_upload_no_link_mutable_toobig",
1581 "413 Request Entity Too Large",
1582 "SDMF is limited to one segment, and 10001 > 10000",
1584 "/uri", t="upload", mutable="true",
1586 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1589 def test_POST_upload_mutable(self):
1590 # this creates a mutable file
1591 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1592 file=("new.txt", self.NEWFILE_CONTENTS))
1594 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1595 d.addCallback(lambda res:
1596 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1597 self.NEWFILE_CONTENTS))
1598 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1600 self.failUnless(IMutableFileNode.providedBy(newnode))
1601 self.failUnless(newnode.is_mutable())
1602 self.failIf(newnode.is_readonly())
1603 self._mutable_node = newnode
1604 self._mutable_uri = newnode.get_uri()
1607 # now upload it again and make sure that the URI doesn't change
1608 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1609 d.addCallback(lambda res:
1610 self.POST(self.public_url + "/foo", t="upload",
1612 file=("new.txt", NEWER_CONTENTS)))
1613 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1614 d.addCallback(lambda res:
1615 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1617 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1619 self.failUnless(IMutableFileNode.providedBy(newnode))
1620 self.failUnless(newnode.is_mutable())
1621 self.failIf(newnode.is_readonly())
1622 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1623 d.addCallback(_got2)
1625 # upload a second time, using PUT instead of POST
1626 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1627 d.addCallback(lambda res:
1628 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1629 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1630 d.addCallback(lambda res:
1631 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1634 # finally list the directory, since mutable files are displayed
1635 # slightly differently
1637 d.addCallback(lambda res:
1638 self.GET(self.public_url + "/foo/",
1639 followRedirect=True))
1640 def _check_page(res):
1641 # TODO: assert more about the contents
1642 self.failUnless("SSK" in res)
1644 d.addCallback(_check_page)
1646 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1648 self.failUnless(IMutableFileNode.providedBy(newnode))
1649 self.failUnless(newnode.is_mutable())
1650 self.failIf(newnode.is_readonly())
1651 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1652 d.addCallback(_got3)
1654 # look at the JSON form of the enclosing directory
1655 d.addCallback(lambda res:
1656 self.GET(self.public_url + "/foo/?t=json",
1657 followRedirect=True))
1658 def _check_page_json(res):
1659 parsed = simplejson.loads(res)
1660 self.failUnlessEqual(parsed[0], "dirnode")
1661 children = dict( [(unicode(name),value)
1663 in parsed[1]["children"].iteritems()] )
1664 self.failUnless("new.txt" in children)
1665 new_json = children["new.txt"]
1666 self.failUnlessEqual(new_json[0], "filenode")
1667 self.failUnless(new_json[1]["mutable"])
1668 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1669 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1670 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1671 d.addCallback(_check_page_json)
1673 # and the JSON form of the file
1674 d.addCallback(lambda res:
1675 self.GET(self.public_url + "/foo/new.txt?t=json"))
1676 def _check_file_json(res):
1677 parsed = simplejson.loads(res)
1678 self.failUnlessEqual(parsed[0], "filenode")
1679 self.failUnless(parsed[1]["mutable"])
1680 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1681 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1682 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1683 d.addCallback(_check_file_json)
1685 # and look at t=uri and t=readonly-uri
1686 d.addCallback(lambda res:
1687 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1688 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1689 d.addCallback(lambda res:
1690 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1691 def _check_ro_uri(res):
1692 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1693 self.failUnlessEqual(res, ro_uri)
1694 d.addCallback(_check_ro_uri)
1696 # make sure we can get to it from /uri/URI
1697 d.addCallback(lambda res:
1698 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1699 d.addCallback(lambda res:
1700 self.failUnlessEqual(res, NEW2_CONTENTS))
1702 # and that HEAD computes the size correctly
1703 d.addCallback(lambda res:
1704 self.HEAD(self.public_url + "/foo/new.txt",
1705 return_response=True))
1706 def _got_headers((res, status, headers)):
1707 self.failUnlessEqual(res, "")
1708 self.failUnlessEqual(headers["content-length"][0],
1709 str(len(NEW2_CONTENTS)))
1710 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1711 d.addCallback(_got_headers)
1713 # make sure that size errors are displayed correctly for overwrite
1714 d.addCallback(lambda res:
1715 self.shouldFail2(error.Error,
1716 "test_POST_upload_mutable-toobig",
1717 "413 Request Entity Too Large",
1718 "SDMF is limited to one segment, and 10001 > 10000",
1720 self.public_url + "/foo", t="upload",
1723 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1726 d.addErrback(self.dump_error)
1729 def test_POST_upload_mutable_toobig(self):
1730 d = self.shouldFail2(error.Error,
1731 "test_POST_upload_mutable_toobig",
1732 "413 Request Entity Too Large",
1733 "SDMF is limited to one segment, and 10001 > 10000",
1735 self.public_url + "/foo",
1736 t="upload", mutable="true",
1738 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1741 def dump_error(self, f):
1742 # if the web server returns an error code (like 400 Bad Request),
1743 # web.client.getPage puts the HTTP response body into the .response
1744 # attribute of the exception object that it gives back. It does not
1745 # appear in the Failure's repr(), so the ERROR that trial displays
1746 # will be rather terse and unhelpful. addErrback this method to the
1747 # end of your chain to get more information out of these errors.
1748 if f.check(error.Error):
1749 print "web.error.Error:"
1751 print f.value.response
1754 def test_POST_upload_replace(self):
1755 d = self.POST(self.public_url + "/foo", t="upload",
1756 file=("bar.txt", self.NEWFILE_CONTENTS))
1758 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1759 d.addCallback(lambda res:
1760 self.failUnlessChildContentsAre(fn, u"bar.txt",
1761 self.NEWFILE_CONTENTS))
1764 def test_POST_upload_no_replace_ok(self):
1765 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1766 file=("new.txt", self.NEWFILE_CONTENTS))
1767 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1768 d.addCallback(lambda res: self.failUnlessEqual(res,
1769 self.NEWFILE_CONTENTS))
1772 def test_POST_upload_no_replace_queryarg(self):
1773 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1774 file=("bar.txt", self.NEWFILE_CONTENTS))
1775 d.addBoth(self.shouldFail, error.Error,
1776 "POST_upload_no_replace_queryarg",
1778 "There was already a child by that name, and you asked me "
1779 "to not replace it")
1780 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1781 d.addCallback(self.failUnlessIsBarDotTxt)
1784 def test_POST_upload_no_replace_field(self):
1785 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1786 file=("bar.txt", self.NEWFILE_CONTENTS))
1787 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1789 "There was already a child by that name, and you asked me "
1790 "to not replace it")
1791 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1792 d.addCallback(self.failUnlessIsBarDotTxt)
1795 def test_POST_upload_whendone(self):
1796 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1797 file=("new.txt", self.NEWFILE_CONTENTS))
1798 d.addBoth(self.shouldRedirect, "/THERE")
1800 d.addCallback(lambda res:
1801 self.failUnlessChildContentsAre(fn, u"new.txt",
1802 self.NEWFILE_CONTENTS))
1805 def test_POST_upload_named(self):
1807 d = self.POST(self.public_url + "/foo", t="upload",
1808 name="new.txt", file=self.NEWFILE_CONTENTS)
1809 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1810 d.addCallback(lambda res:
1811 self.failUnlessChildContentsAre(fn, u"new.txt",
1812 self.NEWFILE_CONTENTS))
1815 def test_POST_upload_named_badfilename(self):
1816 d = self.POST(self.public_url + "/foo", t="upload",
1817 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1818 d.addBoth(self.shouldFail, error.Error,
1819 "test_POST_upload_named_badfilename",
1821 "name= may not contain a slash",
1823 # make sure that nothing was added
1824 d.addCallback(lambda res:
1825 self.failUnlessNodeKeysAre(self._foo_node,
1826 [u"bar.txt", u"blockingfile",
1827 u"empty", u"n\u00fc.txt",
1831 def test_POST_FILEURL_check(self):
1832 bar_url = self.public_url + "/foo/bar.txt"
1833 d = self.POST(bar_url, t="check")
1835 self.failUnless("Healthy :" in res)
1836 d.addCallback(_check)
1837 redir_url = "http://allmydata.org/TARGET"
1838 def _check2(statuscode, target):
1839 self.failUnlessEqual(statuscode, str(http.FOUND))
1840 self.failUnlessEqual(target, redir_url)
1841 d.addCallback(lambda res:
1842 self.shouldRedirect2("test_POST_FILEURL_check",
1846 when_done=redir_url))
1847 d.addCallback(lambda res:
1848 self.POST(bar_url, t="check", return_to=redir_url))
1850 self.failUnless("Healthy :" in res)
1851 self.failUnless("Return to file" in res)
1852 self.failUnless(redir_url in res)
1853 d.addCallback(_check3)
1855 d.addCallback(lambda res:
1856 self.POST(bar_url, t="check", output="JSON"))
1857 def _check_json(res):
1858 data = simplejson.loads(res)
1859 self.failUnless("storage-index" in data)
1860 self.failUnless(data["results"]["healthy"])
1861 d.addCallback(_check_json)
1865 def test_POST_FILEURL_check_and_repair(self):
1866 bar_url = self.public_url + "/foo/bar.txt"
1867 d = self.POST(bar_url, t="check", repair="true")
1869 self.failUnless("Healthy :" in res)
1870 d.addCallback(_check)
1871 redir_url = "http://allmydata.org/TARGET"
1872 def _check2(statuscode, target):
1873 self.failUnlessEqual(statuscode, str(http.FOUND))
1874 self.failUnlessEqual(target, redir_url)
1875 d.addCallback(lambda res:
1876 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1879 t="check", repair="true",
1880 when_done=redir_url))
1881 d.addCallback(lambda res:
1882 self.POST(bar_url, t="check", return_to=redir_url))
1884 self.failUnless("Healthy :" in res)
1885 self.failUnless("Return to file" in res)
1886 self.failUnless(redir_url in res)
1887 d.addCallback(_check3)
1890 def test_POST_DIRURL_check(self):
1891 foo_url = self.public_url + "/foo/"
1892 d = self.POST(foo_url, t="check")
1894 self.failUnless("Healthy :" in res, res)
1895 d.addCallback(_check)
1896 redir_url = "http://allmydata.org/TARGET"
1897 def _check2(statuscode, target):
1898 self.failUnlessEqual(statuscode, str(http.FOUND))
1899 self.failUnlessEqual(target, redir_url)
1900 d.addCallback(lambda res:
1901 self.shouldRedirect2("test_POST_DIRURL_check",
1905 when_done=redir_url))
1906 d.addCallback(lambda res:
1907 self.POST(foo_url, t="check", return_to=redir_url))
1909 self.failUnless("Healthy :" in res, res)
1910 self.failUnless("Return to file/directory" in res)
1911 self.failUnless(redir_url in res)
1912 d.addCallback(_check3)
1914 d.addCallback(lambda res:
1915 self.POST(foo_url, t="check", output="JSON"))
1916 def _check_json(res):
1917 data = simplejson.loads(res)
1918 self.failUnless("storage-index" in data)
1919 self.failUnless(data["results"]["healthy"])
1920 d.addCallback(_check_json)
1924 def test_POST_DIRURL_check_and_repair(self):
1925 foo_url = self.public_url + "/foo/"
1926 d = self.POST(foo_url, t="check", repair="true")
1928 self.failUnless("Healthy :" in res, res)
1929 d.addCallback(_check)
1930 redir_url = "http://allmydata.org/TARGET"
1931 def _check2(statuscode, target):
1932 self.failUnlessEqual(statuscode, str(http.FOUND))
1933 self.failUnlessEqual(target, redir_url)
1934 d.addCallback(lambda res:
1935 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1938 t="check", repair="true",
1939 when_done=redir_url))
1940 d.addCallback(lambda res:
1941 self.POST(foo_url, t="check", return_to=redir_url))
1943 self.failUnless("Healthy :" in res)
1944 self.failUnless("Return to file/directory" in res)
1945 self.failUnless(redir_url in res)
1946 d.addCallback(_check3)
1949 def wait_for_operation(self, ignored, ophandle):
1950 url = "/operations/" + ophandle
1951 url += "?t=status&output=JSON"
1954 data = simplejson.loads(res)
1955 if not data["finished"]:
1956 d = self.stall(delay=1.0)
1957 d.addCallback(self.wait_for_operation, ophandle)
1963 def get_operation_results(self, ignored, ophandle, output=None):
1964 url = "/operations/" + ophandle
1967 url += "&output=" + output
1970 if output and output.lower() == "json":
1971 return simplejson.loads(res)
1976 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1977 d = self.shouldFail2(error.Error,
1978 "test_POST_DIRURL_deepcheck_no_ophandle",
1980 "slow operation requires ophandle=",
1981 self.POST, self.public_url, t="start-deep-check")
1984 def test_POST_DIRURL_deepcheck(self):
1985 def _check_redirect(statuscode, target):
1986 self.failUnlessEqual(statuscode, str(http.FOUND))
1987 self.failUnless(target.endswith("/operations/123"))
1988 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1989 self.POST, self.public_url,
1990 t="start-deep-check", ophandle="123")
1991 d.addCallback(self.wait_for_operation, "123")
1992 def _check_json(data):
1993 self.failUnlessEqual(data["finished"], True)
1994 self.failUnlessEqual(data["count-objects-checked"], 8)
1995 self.failUnlessEqual(data["count-objects-healthy"], 8)
1996 d.addCallback(_check_json)
1997 d.addCallback(self.get_operation_results, "123", "html")
1998 def _check_html(res):
1999 self.failUnless("Objects Checked: <span>8</span>" in res)
2000 self.failUnless("Objects Healthy: <span>8</span>" in res)
2001 d.addCallback(_check_html)
2003 d.addCallback(lambda res:
2004 self.GET("/operations/123/"))
2005 d.addCallback(_check_html) # should be the same as without the slash
2007 d.addCallback(lambda res:
2008 self.shouldFail2(error.Error, "one", "404 Not Found",
2009 "No detailed results for SI bogus",
2010 self.GET, "/operations/123/bogus"))
2012 foo_si = self._foo_node.get_storage_index()
2013 foo_si_s = base32.b2a(foo_si)
2014 d.addCallback(lambda res:
2015 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2016 def _check_foo_json(res):
2017 data = simplejson.loads(res)
2018 self.failUnlessEqual(data["storage-index"], foo_si_s)
2019 self.failUnless(data["results"]["healthy"])
2020 d.addCallback(_check_foo_json)
2023 def test_POST_DIRURL_deepcheck_and_repair(self):
2024 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2025 ophandle="124", output="json", followRedirect=True)
2026 d.addCallback(self.wait_for_operation, "124")
2027 def _check_json(data):
2028 self.failUnlessEqual(data["finished"], True)
2029 self.failUnlessEqual(data["count-objects-checked"], 8)
2030 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
2031 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
2032 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
2033 self.failUnlessEqual(data["count-repairs-attempted"], 0)
2034 self.failUnlessEqual(data["count-repairs-successful"], 0)
2035 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
2036 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
2037 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
2038 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
2039 d.addCallback(_check_json)
2040 d.addCallback(self.get_operation_results, "124", "html")
2041 def _check_html(res):
2042 self.failUnless("Objects Checked: <span>8</span>" in res)
2044 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2045 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2046 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2048 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2049 self.failUnless("Repairs Successful: <span>0</span>" in res)
2050 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2052 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2053 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2054 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2055 d.addCallback(_check_html)
2058 def test_POST_FILEURL_bad_t(self):
2059 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2060 "POST to file: bad t=bogus",
2061 self.POST, self.public_url + "/foo/bar.txt",
2065 def test_POST_mkdir(self): # return value?
2066 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2067 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2068 d.addCallback(self.failUnlessNodeKeysAre, [])
2071 def test_POST_mkdir_initial_children(self):
2072 (newkids, caps) = self._create_initial_children()
2073 d = self.POST2(self.public_url +
2074 "/foo?t=mkdir-with-children&name=newdir",
2075 simplejson.dumps(newkids))
2076 d.addCallback(lambda res:
2077 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2078 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2079 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2080 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2081 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2084 def test_POST_mkdir_immutable(self):
2085 (newkids, caps) = self._create_immutable_children()
2086 d = self.POST2(self.public_url +
2087 "/foo?t=mkdir-immutable&name=newdir",
2088 simplejson.dumps(newkids))
2089 d.addCallback(lambda res:
2090 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2091 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2092 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2093 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2094 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2095 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2096 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2097 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2098 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2099 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2100 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2101 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2102 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
2105 def test_POST_mkdir_immutable_bad(self):
2106 (newkids, caps) = self._create_initial_children()
2107 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2109 "needed to be immutable but was not",
2112 "/foo?t=mkdir-immutable&name=newdir",
2113 simplejson.dumps(newkids))
2116 def test_POST_mkdir_2(self):
2117 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2118 d.addCallback(lambda res:
2119 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2120 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2121 d.addCallback(self.failUnlessNodeKeysAre, [])
2124 def test_POST_mkdirs_2(self):
2125 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2126 d.addCallback(lambda res:
2127 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2128 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2129 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2130 d.addCallback(self.failUnlessNodeKeysAre, [])
2133 def test_POST_mkdir_no_parentdir_noredirect(self):
2134 d = self.POST("/uri?t=mkdir")
2135 def _after_mkdir(res):
2136 uri.DirectoryURI.init_from_string(res)
2137 d.addCallback(_after_mkdir)
2140 def test_POST_mkdir_no_parentdir_noredirect2(self):
2141 # make sure form-based arguments (as on the welcome page) still work
2142 d = self.POST("/uri", t="mkdir")
2143 def _after_mkdir(res):
2144 uri.DirectoryURI.init_from_string(res)
2145 d.addCallback(_after_mkdir)
2146 d.addErrback(self.explain_web_error)
2149 def test_POST_mkdir_no_parentdir_redirect(self):
2150 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2151 d.addBoth(self.shouldRedirect, None, statuscode='303')
2152 def _check_target(target):
2153 target = urllib.unquote(target)
2154 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2155 d.addCallback(_check_target)
2158 def test_POST_mkdir_no_parentdir_redirect2(self):
2159 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2160 d.addBoth(self.shouldRedirect, None, statuscode='303')
2161 def _check_target(target):
2162 target = urllib.unquote(target)
2163 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2164 d.addCallback(_check_target)
2165 d.addErrback(self.explain_web_error)
2168 def _make_readonly(self, u):
2169 ro_uri = uri.from_string(u).get_readonly()
2172 return ro_uri.to_string()
2174 def _create_initial_children(self):
2175 contents, n, filecap1 = self.makefile(12)
2176 md1 = {"metakey1": "metavalue1"}
2177 filecap2 = make_mutable_file_uri()
2178 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2179 filecap3 = node3.get_readonly_uri()
2180 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2181 dircap = DirectoryNode(node4, None, None).get_uri()
2182 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2183 emptydircap = "URI:DIR2-LIT:"
2184 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2185 "ro_uri": self._make_readonly(filecap1),
2186 "metadata": md1, }],
2187 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2188 "ro_uri": self._make_readonly(filecap2)}],
2189 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2190 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2191 "ro_uri": unknown_rocap}],
2192 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2193 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2194 u"dirchild": ["dirnode", {"rw_uri": dircap,
2195 "ro_uri": self._make_readonly(dircap)}],
2196 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2197 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2199 return newkids, {'filecap1': filecap1,
2200 'filecap2': filecap2,
2201 'filecap3': filecap3,
2202 'unknown_rwcap': unknown_rwcap,
2203 'unknown_rocap': unknown_rocap,
2204 'unknown_immcap': unknown_immcap,
2206 'litdircap': litdircap,
2207 'emptydircap': emptydircap}
2209 def _create_immutable_children(self):
2210 contents, n, filecap1 = self.makefile(12)
2211 md1 = {"metakey1": "metavalue1"}
2212 tnode = create_chk_filenode("immutable directory contents\n"*10)
2213 dnode = DirectoryNode(tnode, None, None)
2214 assert not dnode.is_mutable()
2215 immdircap = dnode.get_uri()
2216 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2217 emptydircap = "URI:DIR2-LIT:"
2218 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2219 "metadata": md1, }],
2220 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2221 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2222 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2223 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2225 return newkids, {'filecap1': filecap1,
2226 'unknown_immcap': unknown_immcap,
2227 'immdircap': immdircap,
2228 'litdircap': litdircap,
2229 'emptydircap': emptydircap}
2231 def test_POST_mkdir_no_parentdir_initial_children(self):
2232 (newkids, caps) = self._create_initial_children()
2233 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2234 def _after_mkdir(res):
2235 self.failUnless(res.startswith("URI:DIR"), res)
2236 n = self.s.create_node_from_uri(res)
2237 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2238 d2.addCallback(lambda ign:
2239 self.failUnlessROChildURIIs(n, u"child-imm",
2241 d2.addCallback(lambda ign:
2242 self.failUnlessRWChildURIIs(n, u"child-mutable",
2244 d2.addCallback(lambda ign:
2245 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2247 d2.addCallback(lambda ign:
2248 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2249 caps['unknown_rwcap']))
2250 d2.addCallback(lambda ign:
2251 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2252 caps['unknown_rocap']))
2253 d2.addCallback(lambda ign:
2254 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2255 caps['unknown_immcap']))
2256 d2.addCallback(lambda ign:
2257 self.failUnlessRWChildURIIs(n, u"dirchild",
2260 d.addCallback(_after_mkdir)
2263 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2264 # the regular /uri?t=mkdir operation is specified to ignore its body.
2265 # Only t=mkdir-with-children pays attention to it.
2266 (newkids, caps) = self._create_initial_children()
2267 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2269 "t=mkdir does not accept children=, "
2270 "try t=mkdir-with-children instead",
2271 self.POST2, "/uri?t=mkdir", # without children
2272 simplejson.dumps(newkids))
2275 def test_POST_noparent_bad(self):
2276 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2277 "/uri accepts only PUT, PUT?t=mkdir, "
2278 "POST?t=upload, and POST?t=mkdir",
2279 self.POST, "/uri?t=bogus")
2282 def test_POST_mkdir_no_parentdir_immutable(self):
2283 (newkids, caps) = self._create_immutable_children()
2284 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2285 def _after_mkdir(res):
2286 self.failUnless(res.startswith("URI:DIR"), res)
2287 n = self.s.create_node_from_uri(res)
2288 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2289 d2.addCallback(lambda ign:
2290 self.failUnlessROChildURIIs(n, u"child-imm",
2292 d2.addCallback(lambda ign:
2293 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2294 caps['unknown_immcap']))
2295 d2.addCallback(lambda ign:
2296 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2298 d2.addCallback(lambda ign:
2299 self.failUnlessROChildURIIs(n, u"dirchild-lit",
2301 d2.addCallback(lambda ign:
2302 self.failUnlessROChildURIIs(n, u"dirchild-empty",
2303 caps['emptydircap']))
2305 d.addCallback(_after_mkdir)
2308 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2309 (newkids, caps) = self._create_initial_children()
2310 d = self.shouldFail2(error.Error,
2311 "test_POST_mkdir_no_parentdir_immutable_bad",
2313 "needed to be immutable but was not",
2315 "/uri?t=mkdir-immutable",
2316 simplejson.dumps(newkids))
2319 def test_welcome_page_mkdir_button(self):
2320 # Fetch the welcome page.
2322 def _after_get_welcome_page(res):
2323 MKDIR_BUTTON_RE = re.compile(
2324 '<form action="([^"]*)" method="post".*?'
2325 '<input type="hidden" name="t" value="([^"]*)" />'
2326 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2327 '<input type="submit" value="Create a directory" />',
2329 mo = MKDIR_BUTTON_RE.search(res)
2330 formaction = mo.group(1)
2332 formaname = mo.group(3)
2333 formavalue = mo.group(4)
2334 return (formaction, formt, formaname, formavalue)
2335 d.addCallback(_after_get_welcome_page)
2336 def _after_parse_form(res):
2337 (formaction, formt, formaname, formavalue) = res
2338 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2339 d.addCallback(_after_parse_form)
2340 d.addBoth(self.shouldRedirect, None, statuscode='303')
2343 def test_POST_mkdir_replace(self): # return value?
2344 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2345 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2346 d.addCallback(self.failUnlessNodeKeysAre, [])
2349 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2350 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2351 d.addBoth(self.shouldFail, error.Error,
2352 "POST_mkdir_no_replace_queryarg",
2354 "There was already a child by that name, and you asked me "
2355 "to not replace it")
2356 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2357 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2360 def test_POST_mkdir_no_replace_field(self): # return value?
2361 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2363 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2365 "There was already a child by that name, and you asked me "
2366 "to not replace it")
2367 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2368 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2371 def test_POST_mkdir_whendone_field(self):
2372 d = self.POST(self.public_url + "/foo",
2373 t="mkdir", name="newdir", when_done="/THERE")
2374 d.addBoth(self.shouldRedirect, "/THERE")
2375 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2376 d.addCallback(self.failUnlessNodeKeysAre, [])
2379 def test_POST_mkdir_whendone_queryarg(self):
2380 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2381 t="mkdir", name="newdir")
2382 d.addBoth(self.shouldRedirect, "/THERE")
2383 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2384 d.addCallback(self.failUnlessNodeKeysAre, [])
2387 def test_POST_bad_t(self):
2388 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2389 "POST to a directory with bad t=BOGUS",
2390 self.POST, self.public_url + "/foo", t="BOGUS")
2393 def test_POST_set_children(self, command_name="set_children"):
2394 contents9, n9, newuri9 = self.makefile(9)
2395 contents10, n10, newuri10 = self.makefile(10)
2396 contents11, n11, newuri11 = self.makefile(11)
2399 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2402 "ctime": 1002777696.7564139,
2403 "mtime": 1002777696.7564139
2406 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2409 "ctime": 1002777696.7564139,
2410 "mtime": 1002777696.7564139
2413 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2416 "ctime": 1002777696.7564139,
2417 "mtime": 1002777696.7564139
2420 }""" % (newuri9, newuri10, newuri11)
2422 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2424 d = client.getPage(url, method="POST", postdata=reqbody)
2426 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2427 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2428 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2430 d.addCallback(_then)
2431 d.addErrback(self.dump_error)
2434 def test_POST_set_children_with_hyphen(self):
2435 return self.test_POST_set_children(command_name="set-children")
2437 def test_POST_link_uri(self):
2438 contents, n, newuri = self.makefile(8)
2439 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2440 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2441 d.addCallback(lambda res:
2442 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2446 def test_POST_link_uri_replace(self):
2447 contents, n, newuri = self.makefile(8)
2448 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2449 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2450 d.addCallback(lambda res:
2451 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2455 def test_POST_link_uri_unknown_bad(self):
2456 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
2457 d.addBoth(self.shouldFail, error.Error,
2458 "POST_link_uri_unknown_bad",
2460 "unknown cap in a write slot")
2463 def test_POST_link_uri_unknown_ro_good(self):
2464 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
2465 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
2468 def test_POST_link_uri_unknown_imm_good(self):
2469 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
2470 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
2473 def test_POST_link_uri_no_replace_queryarg(self):
2474 contents, n, newuri = self.makefile(8)
2475 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2476 name="bar.txt", uri=newuri)
2477 d.addBoth(self.shouldFail, error.Error,
2478 "POST_link_uri_no_replace_queryarg",
2480 "There was already a child by that name, and you asked me "
2481 "to not replace it")
2482 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2483 d.addCallback(self.failUnlessIsBarDotTxt)
2486 def test_POST_link_uri_no_replace_field(self):
2487 contents, n, newuri = self.makefile(8)
2488 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2489 name="bar.txt", uri=newuri)
2490 d.addBoth(self.shouldFail, error.Error,
2491 "POST_link_uri_no_replace_field",
2493 "There was already a child by that name, and you asked me "
2494 "to not replace it")
2495 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2496 d.addCallback(self.failUnlessIsBarDotTxt)
2499 def test_POST_delete(self):
2500 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2501 d.addCallback(lambda res: self._foo_node.list())
2502 def _check(children):
2503 self.failIf(u"bar.txt" in children)
2504 d.addCallback(_check)
2507 def test_POST_rename_file(self):
2508 d = self.POST(self.public_url + "/foo", t="rename",
2509 from_name="bar.txt", to_name='wibble.txt')
2510 d.addCallback(lambda res:
2511 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2512 d.addCallback(lambda res:
2513 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2514 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2515 d.addCallback(self.failUnlessIsBarDotTxt)
2516 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2517 d.addCallback(self.failUnlessIsBarJSON)
2520 def test_POST_rename_file_redundant(self):
2521 d = self.POST(self.public_url + "/foo", t="rename",
2522 from_name="bar.txt", to_name='bar.txt')
2523 d.addCallback(lambda res:
2524 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2525 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2526 d.addCallback(self.failUnlessIsBarDotTxt)
2527 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2528 d.addCallback(self.failUnlessIsBarJSON)
2531 def test_POST_rename_file_replace(self):
2532 # rename a file and replace a directory with it
2533 d = self.POST(self.public_url + "/foo", t="rename",
2534 from_name="bar.txt", to_name='empty')
2535 d.addCallback(lambda res:
2536 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2537 d.addCallback(lambda res:
2538 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2539 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2540 d.addCallback(self.failUnlessIsBarDotTxt)
2541 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2542 d.addCallback(self.failUnlessIsBarJSON)
2545 def test_POST_rename_file_no_replace_queryarg(self):
2546 # rename a file and replace a directory with it
2547 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2548 from_name="bar.txt", to_name='empty')
2549 d.addBoth(self.shouldFail, error.Error,
2550 "POST_rename_file_no_replace_queryarg",
2552 "There was already a child by that name, and you asked me "
2553 "to not replace it")
2554 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2555 d.addCallback(self.failUnlessIsEmptyJSON)
2558 def test_POST_rename_file_no_replace_field(self):
2559 # rename a file and replace a directory with it
2560 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2561 from_name="bar.txt", to_name='empty')
2562 d.addBoth(self.shouldFail, error.Error,
2563 "POST_rename_file_no_replace_field",
2565 "There was already a child by that name, and you asked me "
2566 "to not replace it")
2567 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2568 d.addCallback(self.failUnlessIsEmptyJSON)
2571 def failUnlessIsEmptyJSON(self, res):
2572 data = simplejson.loads(res)
2573 self.failUnlessEqual(data[0], "dirnode", data)
2574 self.failUnlessEqual(len(data[1]["children"]), 0)
2576 def test_POST_rename_file_slash_fail(self):
2577 d = self.POST(self.public_url + "/foo", t="rename",
2578 from_name="bar.txt", to_name='kirk/spock.txt')
2579 d.addBoth(self.shouldFail, error.Error,
2580 "test_POST_rename_file_slash_fail",
2582 "to_name= may not contain a slash",
2584 d.addCallback(lambda res:
2585 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2588 def test_POST_rename_dir(self):
2589 d = self.POST(self.public_url, t="rename",
2590 from_name="foo", to_name='plunk')
2591 d.addCallback(lambda res:
2592 self.failIfNodeHasChild(self.public_root, u"foo"))
2593 d.addCallback(lambda res:
2594 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2595 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2596 d.addCallback(self.failUnlessIsFooJSON)
2599 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2600 """ If target is not None then the redirection has to go to target. If
2601 statuscode is not None then the redirection has to be accomplished with
2602 that HTTP status code."""
2603 if not isinstance(res, failure.Failure):
2604 to_where = (target is None) and "somewhere" or ("to " + target)
2605 self.fail("%s: we were expecting to get redirected %s, not get an"
2606 " actual page: %s" % (which, to_where, res))
2607 res.trap(error.PageRedirect)
2608 if statuscode is not None:
2609 self.failUnlessEqual(res.value.status, statuscode,
2610 "%s: not a redirect" % which)
2611 if target is not None:
2612 # the PageRedirect does not seem to capture the uri= query arg
2613 # properly, so we can't check for it.
2614 realtarget = self.webish_url + target
2615 self.failUnlessEqual(res.value.location, realtarget,
2616 "%s: wrong target" % which)
2617 return res.value.location
2619 def test_GET_URI_form(self):
2620 base = "/uri?uri=%s" % self._bar_txt_uri
2621 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2622 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2624 d.addBoth(self.shouldRedirect, targetbase)
2625 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2626 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2627 d.addCallback(lambda res: self.GET(base+"&t=json"))
2628 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2629 d.addCallback(self.log, "about to get file by uri")
2630 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2631 d.addCallback(self.failUnlessIsBarDotTxt)
2632 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2633 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2634 followRedirect=True))
2635 d.addCallback(self.failUnlessIsFooJSON)
2636 d.addCallback(self.log, "got dir by uri")
2640 def test_GET_URI_form_bad(self):
2641 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2642 "400 Bad Request", "GET /uri requires uri=",
2646 def test_GET_rename_form(self):
2647 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2648 followRedirect=True)
2650 self.failUnless('name="when_done" value="."' in res, res)
2651 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2652 d.addCallback(_check)
2655 def log(self, res, msg):
2656 #print "MSG: %s RES: %s" % (msg, res)
2660 def test_GET_URI_URL(self):
2661 base = "/uri/%s" % self._bar_txt_uri
2663 d.addCallback(self.failUnlessIsBarDotTxt)
2664 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2665 d.addCallback(self.failUnlessIsBarDotTxt)
2666 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2667 d.addCallback(self.failUnlessIsBarDotTxt)
2670 def test_GET_URI_URL_dir(self):
2671 base = "/uri/%s?t=json" % self._foo_uri
2673 d.addCallback(self.failUnlessIsFooJSON)
2676 def test_GET_URI_URL_missing(self):
2677 base = "/uri/%s" % self._bad_file_uri
2678 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2679 http.GONE, None, "NotEnoughSharesError",
2681 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2682 # here? we must arrange for a download to fail after target.open()
2683 # has been called, and then inspect the response to see that it is
2684 # shorter than we expected.
2687 def test_PUT_DIRURL_uri(self):
2688 d = self.s.create_dirnode()
2690 new_uri = dn.get_uri()
2691 # replace /foo with a new (empty) directory
2692 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2693 d.addCallback(lambda res:
2694 self.failUnlessEqual(res.strip(), new_uri))
2695 d.addCallback(lambda res:
2696 self.failUnlessRWChildURIIs(self.public_root,
2700 d.addCallback(_made_dir)
2703 def test_PUT_DIRURL_uri_noreplace(self):
2704 d = self.s.create_dirnode()
2706 new_uri = dn.get_uri()
2707 # replace /foo with a new (empty) directory, but ask that
2708 # replace=false, so it should fail
2709 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2710 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2712 self.public_url + "/foo?t=uri&replace=false",
2714 d.addCallback(lambda res:
2715 self.failUnlessRWChildURIIs(self.public_root,
2719 d.addCallback(_made_dir)
2722 def test_PUT_DIRURL_bad_t(self):
2723 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2724 "400 Bad Request", "PUT to a directory",
2725 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2726 d.addCallback(lambda res:
2727 self.failUnlessRWChildURIIs(self.public_root,
2732 def test_PUT_NEWFILEURL_uri(self):
2733 contents, n, new_uri = self.makefile(8)
2734 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2735 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2736 d.addCallback(lambda res:
2737 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2741 def test_PUT_NEWFILEURL_uri_replace(self):
2742 contents, n, new_uri = self.makefile(8)
2743 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2744 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2745 d.addCallback(lambda res:
2746 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2750 def test_PUT_NEWFILEURL_uri_no_replace(self):
2751 contents, n, new_uri = self.makefile(8)
2752 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2753 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2755 "There was already a child by that name, and you asked me "
2756 "to not replace it")
2759 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
2760 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
2761 d.addBoth(self.shouldFail, error.Error,
2762 "POST_put_uri_unknown_bad",
2764 "unknown cap in a write slot")
2767 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
2768 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
2769 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2770 u"put-future-ro.txt")
2773 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
2774 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
2775 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2776 u"put-future-imm.txt")
2779 def test_PUT_NEWFILE_URI(self):
2780 file_contents = "New file contents here\n"
2781 d = self.PUT("/uri", file_contents)
2783 assert isinstance(uri, str), uri
2784 self.failUnless(uri in FakeCHKFileNode.all_contents)
2785 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2787 return self.GET("/uri/%s" % uri)
2788 d.addCallback(_check)
2790 self.failUnlessEqual(res, file_contents)
2791 d.addCallback(_check2)
2794 def test_PUT_NEWFILE_URI_not_mutable(self):
2795 file_contents = "New file contents here\n"
2796 d = self.PUT("/uri?mutable=false", file_contents)
2798 assert isinstance(uri, str), uri
2799 self.failUnless(uri in FakeCHKFileNode.all_contents)
2800 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2802 return self.GET("/uri/%s" % uri)
2803 d.addCallback(_check)
2805 self.failUnlessEqual(res, file_contents)
2806 d.addCallback(_check2)
2809 def test_PUT_NEWFILE_URI_only_PUT(self):
2810 d = self.PUT("/uri?t=bogus", "")
2811 d.addBoth(self.shouldFail, error.Error,
2812 "PUT_NEWFILE_URI_only_PUT",
2814 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2817 def test_PUT_NEWFILE_URI_mutable(self):
2818 file_contents = "New file contents here\n"
2819 d = self.PUT("/uri?mutable=true", file_contents)
2820 def _check1(filecap):
2821 filecap = filecap.strip()
2822 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2823 self.filecap = filecap
2824 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2825 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
2826 n = self.s.create_node_from_uri(filecap)
2827 return n.download_best_version()
2828 d.addCallback(_check1)
2830 self.failUnlessEqual(data, file_contents)
2831 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2832 d.addCallback(_check2)
2834 self.failUnlessEqual(res, file_contents)
2835 d.addCallback(_check3)
2838 def test_PUT_mkdir(self):
2839 d = self.PUT("/uri?t=mkdir", "")
2841 n = self.s.create_node_from_uri(uri.strip())
2842 d2 = self.failUnlessNodeKeysAre(n, [])
2843 d2.addCallback(lambda res:
2844 self.GET("/uri/%s?t=json" % uri))
2846 d.addCallback(_check)
2847 d.addCallback(self.failUnlessIsEmptyJSON)
2850 def test_POST_check(self):
2851 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2853 # this returns a string form of the results, which are probably
2854 # None since we're using fake filenodes.
2855 # TODO: verify that the check actually happened, by changing
2856 # FakeCHKFileNode to count how many times .check() has been
2859 d.addCallback(_done)
2862 def test_bad_method(self):
2863 url = self.webish_url + self.public_url + "/foo/bar.txt"
2864 d = self.shouldHTTPError("test_bad_method",
2865 501, "Not Implemented",
2866 "I don't know how to treat a BOGUS request.",
2867 client.getPage, url, method="BOGUS")
2870 def test_short_url(self):
2871 url = self.webish_url + "/uri"
2872 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2873 "I don't know how to treat a DELETE request.",
2874 client.getPage, url, method="DELETE")
2877 def test_ophandle_bad(self):
2878 url = self.webish_url + "/operations/bogus?t=status"
2879 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2880 "unknown/expired handle 'bogus'",
2881 client.getPage, url)
2884 def test_ophandle_cancel(self):
2885 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2886 followRedirect=True)
2887 d.addCallback(lambda ignored:
2888 self.GET("/operations/128?t=status&output=JSON"))
2890 data = simplejson.loads(res)
2891 self.failUnless("finished" in data, res)
2892 monitor = self.ws.root.child_operations.handles["128"][0]
2893 d = self.POST("/operations/128?t=cancel&output=JSON")
2895 data = simplejson.loads(res)
2896 self.failUnless("finished" in data, res)
2897 # t=cancel causes the handle to be forgotten
2898 self.failUnless(monitor.is_cancelled())
2899 d.addCallback(_check2)
2901 d.addCallback(_check1)
2902 d.addCallback(lambda ignored:
2903 self.shouldHTTPError("test_ophandle_cancel",
2904 404, "404 Not Found",
2905 "unknown/expired handle '128'",
2907 "/operations/128?t=status&output=JSON"))
2910 def test_ophandle_retainfor(self):
2911 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2912 followRedirect=True)
2913 d.addCallback(lambda ignored:
2914 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2916 data = simplejson.loads(res)
2917 self.failUnless("finished" in data, res)
2918 d.addCallback(_check1)
2919 # the retain-for=0 will cause the handle to be expired very soon
2920 d.addCallback(lambda ign:
2921 self.clock.advance(2.0))
2922 d.addCallback(lambda ignored:
2923 self.shouldHTTPError("test_ophandle_retainfor",
2924 404, "404 Not Found",
2925 "unknown/expired handle '129'",
2927 "/operations/129?t=status&output=JSON"))
2930 def test_ophandle_release_after_complete(self):
2931 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2932 followRedirect=True)
2933 d.addCallback(self.wait_for_operation, "130")
2934 d.addCallback(lambda ignored:
2935 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2936 # the release-after-complete=true will cause the handle to be expired
2937 d.addCallback(lambda ignored:
2938 self.shouldHTTPError("test_ophandle_release_after_complete",
2939 404, "404 Not Found",
2940 "unknown/expired handle '130'",
2942 "/operations/130?t=status&output=JSON"))
2945 def test_uncollected_ophandle_expiration(self):
2946 # uncollected ophandles should expire after 4 days
2947 def _make_uncollected_ophandle(ophandle):
2948 d = self.POST(self.public_url +
2949 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
2950 followRedirect=False)
2951 # When we start the operation, the webapi server will want
2952 # to redirect us to the page for the ophandle, so we get
2953 # confirmation that the operation has started. If the
2954 # manifest operation has finished by the time we get there,
2955 # following that redirect (by setting followRedirect=True
2956 # above) has the side effect of collecting the ophandle that
2957 # we've just created, which means that we can't use the
2958 # ophandle to test the uncollected timeout anymore. So,
2959 # instead, catch the 302 here and don't follow it.
2960 d.addBoth(self.should302, "uncollected_ophandle_creation")
2962 # Create an ophandle, don't collect it, then advance the clock by
2963 # 4 days - 1 second and make sure that the ophandle is still there.
2964 d = _make_uncollected_ophandle(131)
2965 d.addCallback(lambda ign:
2966 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
2967 d.addCallback(lambda ign:
2968 self.GET("/operations/131?t=status&output=JSON"))
2970 data = simplejson.loads(res)
2971 self.failUnless("finished" in data, res)
2972 d.addCallback(_check1)
2973 # Create an ophandle, don't collect it, then try to collect it
2974 # after 4 days. It should be gone.
2975 d.addCallback(lambda ign:
2976 _make_uncollected_ophandle(132))
2977 d.addCallback(lambda ign:
2978 self.clock.advance(96*60*60))
2979 d.addCallback(lambda ign:
2980 self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
2981 404, "404 Not Found",
2982 "unknown/expired handle '132'",
2984 "/operations/132?t=status&output=JSON"))
2987 def test_collected_ophandle_expiration(self):
2988 # collected ophandles should expire after 1 day
2989 def _make_collected_ophandle(ophandle):
2990 d = self.POST(self.public_url +
2991 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
2992 followRedirect=True)
2993 # By following the initial redirect, we collect the ophandle
2994 # we've just created.
2996 # Create a collected ophandle, then collect it after 23 hours
2997 # and 59 seconds to make sure that it is still there.
2998 d = _make_collected_ophandle(133)
2999 d.addCallback(lambda ign:
3000 self.clock.advance((24*60*60) - 1))
3001 d.addCallback(lambda ign:
3002 self.GET("/operations/133?t=status&output=JSON"))
3004 data = simplejson.loads(res)
3005 self.failUnless("finished" in data, res)
3006 d.addCallback(_check1)
3007 # Create another uncollected ophandle, then try to collect it
3008 # after 24 hours to make sure that it is gone.
3009 d.addCallback(lambda ign:
3010 _make_collected_ophandle(134))
3011 d.addCallback(lambda ign:
3012 self.clock.advance(24*60*60))
3013 d.addCallback(lambda ign:
3014 self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
3015 404, "404 Not Found",
3016 "unknown/expired handle '134'",
3018 "/operations/134?t=status&output=JSON"))
3021 def test_incident(self):
3022 d = self.POST("/report_incident", details="eek")
3024 self.failUnless("Thank you for your report!" in res, res)
3025 d.addCallback(_done)
3028 def test_static(self):
3029 webdir = os.path.join(self.staticdir, "subdir")
3030 fileutil.make_dirs(webdir)
3031 f = open(os.path.join(webdir, "hello.txt"), "wb")
3035 d = self.GET("/static/subdir/hello.txt")
3037 self.failUnlessEqual(res, "hello")
3038 d.addCallback(_check)
class Util(unittest.TestCase, ShouldFailMixin):
    """Unit tests for the small helpers in allmydata.web.common/status."""
    # NOTE(review): the `def convert`/`def convert2` headers and one argument
    # line were dropped from the garbled listing; restored from the visible
    # call sites and expected strings.

    def test_load_file(self):
        # This will raise an exception unless a well-formed XML file is found under that name.
        common.getxmlfile('directory.xhtml').load()

    def test_parse_replace_arg(self):
        self.failUnlessEqual(common.parse_replace_arg("true"), True)
        self.failUnlessEqual(common.parse_replace_arg("false"), False)
        self.failUnlessEqual(common.parse_replace_arg("only-files"),
                             "only-files")
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        def convert(s):
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
        def convert2(s):
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
3088 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
3090 def CHECK(self, ign, which, args, clientnum=0):
3091 fileurl = self.fileurls[which]
3092 url = fileurl + "?" + args
3093 return self.GET(url, method="POST", clientnum=clientnum)
3095 def test_filecheck(self):
3096 self.basedir = "web/Grid/filecheck"
3098 c0 = self.g.clients[0]
3101 d = c0.upload(upload.Data(DATA, convergence=""))
3102 def _stash_uri(ur, which):
3103 self.uris[which] = ur.uri
3104 d.addCallback(_stash_uri, "good")
3105 d.addCallback(lambda ign:
3106 c0.upload(upload.Data(DATA+"1", convergence="")))
3107 d.addCallback(_stash_uri, "sick")
3108 d.addCallback(lambda ign:
3109 c0.upload(upload.Data(DATA+"2", convergence="")))
3110 d.addCallback(_stash_uri, "dead")
3111 def _stash_mutable_uri(n, which):
3112 self.uris[which] = n.get_uri()
3113 assert isinstance(self.uris[which], str)
3114 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3115 d.addCallback(_stash_mutable_uri, "corrupt")
3116 d.addCallback(lambda ign:
3117 c0.upload(upload.Data("literal", convergence="")))
3118 d.addCallback(_stash_uri, "small")
3119 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3120 d.addCallback(_stash_mutable_uri, "smalldir")
3122 def _compute_fileurls(ignored):
3124 for which in self.uris:
3125 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3126 d.addCallback(_compute_fileurls)
3128 def _clobber_shares(ignored):
3129 good_shares = self.find_shares(self.uris["good"])
3130 self.failUnlessEqual(len(good_shares), 10)
3131 sick_shares = self.find_shares(self.uris["sick"])
3132 os.unlink(sick_shares[0][2])
3133 dead_shares = self.find_shares(self.uris["dead"])
3134 for i in range(1, 10):
3135 os.unlink(dead_shares[i][2])
3136 c_shares = self.find_shares(self.uris["corrupt"])
3137 cso = CorruptShareOptions()
3138 cso.stdout = StringIO()
3139 cso.parseOptions([c_shares[0][2]])
3141 d.addCallback(_clobber_shares)
3143 d.addCallback(self.CHECK, "good", "t=check")
3144 def _got_html_good(res):
3145 self.failUnless("Healthy" in res, res)
3146 self.failIf("Not Healthy" in res, res)
3147 d.addCallback(_got_html_good)
3148 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
3149 def _got_html_good_return_to(res):
3150 self.failUnless("Healthy" in res, res)
3151 self.failIf("Not Healthy" in res, res)
3152 self.failUnless('<a href="somewhere">Return to file'
3154 d.addCallback(_got_html_good_return_to)
3155 d.addCallback(self.CHECK, "good", "t=check&output=json")
3156 def _got_json_good(res):
3157 r = simplejson.loads(res)
3158 self.failUnlessEqual(r["summary"], "Healthy")
3159 self.failUnless(r["results"]["healthy"])
3160 self.failIf(r["results"]["needs-rebalancing"])
3161 self.failUnless(r["results"]["recoverable"])
3162 d.addCallback(_got_json_good)
3164 d.addCallback(self.CHECK, "small", "t=check")
3165 def _got_html_small(res):
3166 self.failUnless("Literal files are always healthy" in res, res)
3167 self.failIf("Not Healthy" in res, res)
3168 d.addCallback(_got_html_small)
3169 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
3170 def _got_html_small_return_to(res):
3171 self.failUnless("Literal files are always healthy" in res, res)
3172 self.failIf("Not Healthy" in res, res)
3173 self.failUnless('<a href="somewhere">Return to file'
3175 d.addCallback(_got_html_small_return_to)
3176 d.addCallback(self.CHECK, "small", "t=check&output=json")
3177 def _got_json_small(res):
3178 r = simplejson.loads(res)
3179 self.failUnlessEqual(r["storage-index"], "")
3180 self.failUnless(r["results"]["healthy"])
3181 d.addCallback(_got_json_small)
3183 d.addCallback(self.CHECK, "smalldir", "t=check")
3184 def _got_html_smalldir(res):
3185 self.failUnless("Literal files are always healthy" in res, res)
3186 self.failIf("Not Healthy" in res, res)
3187 d.addCallback(_got_html_smalldir)
3188 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
3189 def _got_json_smalldir(res):
3190 r = simplejson.loads(res)
3191 self.failUnlessEqual(r["storage-index"], "")
3192 self.failUnless(r["results"]["healthy"])
3193 d.addCallback(_got_json_smalldir)
3195 d.addCallback(self.CHECK, "sick", "t=check")
3196 def _got_html_sick(res):
3197 self.failUnless("Not Healthy" in res, res)
3198 d.addCallback(_got_html_sick)
3199 d.addCallback(self.CHECK, "sick", "t=check&output=json")
3200 def _got_json_sick(res):
3201 r = simplejson.loads(res)
3202 self.failUnlessEqual(r["summary"],
3203 "Not Healthy: 9 shares (enc 3-of-10)")
3204 self.failIf(r["results"]["healthy"])
3205 self.failIf(r["results"]["needs-rebalancing"])
3206 self.failUnless(r["results"]["recoverable"])
3207 d.addCallback(_got_json_sick)
3209 d.addCallback(self.CHECK, "dead", "t=check")
3210 def _got_html_dead(res):
3211 self.failUnless("Not Healthy" in res, res)
3212 d.addCallback(_got_html_dead)
3213 d.addCallback(self.CHECK, "dead", "t=check&output=json")
3214 def _got_json_dead(res):
3215 r = simplejson.loads(res)
3216 self.failUnlessEqual(r["summary"],
3217 "Not Healthy: 1 shares (enc 3-of-10)")
3218 self.failIf(r["results"]["healthy"])
3219 self.failIf(r["results"]["needs-rebalancing"])
3220 self.failIf(r["results"]["recoverable"])
3221 d.addCallback(_got_json_dead)
3223 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
3224 def _got_html_corrupt(res):
3225 self.failUnless("Not Healthy! : Unhealthy" in res, res)
3226 d.addCallback(_got_html_corrupt)
3227 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
3228 def _got_json_corrupt(res):
3229 r = simplejson.loads(res)
3230 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
3232 self.failIf(r["results"]["healthy"])
3233 self.failUnless(r["results"]["recoverable"])
3234 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
3235 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
3236 d.addCallback(_got_json_corrupt)
3238 d.addErrback(self.explain_web_error)
3241 def test_repair_html(self):
3242 self.basedir = "web/Grid/repair_html"
3244 c0 = self.g.clients[0]
3247 d = c0.upload(upload.Data(DATA, convergence=""))
3248 def _stash_uri(ur, which):
3249 self.uris[which] = ur.uri
3250 d.addCallback(_stash_uri, "good")
3251 d.addCallback(lambda ign:
3252 c0.upload(upload.Data(DATA+"1", convergence="")))
3253 d.addCallback(_stash_uri, "sick")
3254 d.addCallback(lambda ign:
3255 c0.upload(upload.Data(DATA+"2", convergence="")))
3256 d.addCallback(_stash_uri, "dead")
3257 def _stash_mutable_uri(n, which):
3258 self.uris[which] = n.get_uri()
3259 assert isinstance(self.uris[which], str)
3260 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3261 d.addCallback(_stash_mutable_uri, "corrupt")
3263 def _compute_fileurls(ignored):
3265 for which in self.uris:
3266 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3267 d.addCallback(_compute_fileurls)
3269 def _clobber_shares(ignored):
3270 good_shares = self.find_shares(self.uris["good"])
3271 self.failUnlessEqual(len(good_shares), 10)
3272 sick_shares = self.find_shares(self.uris["sick"])
3273 os.unlink(sick_shares[0][2])
3274 dead_shares = self.find_shares(self.uris["dead"])
3275 for i in range(1, 10):
3276 os.unlink(dead_shares[i][2])
3277 c_shares = self.find_shares(self.uris["corrupt"])
3278 cso = CorruptShareOptions()
3279 cso.stdout = StringIO()
3280 cso.parseOptions([c_shares[0][2]])
3282 d.addCallback(_clobber_shares)
3284 d.addCallback(self.CHECK, "good", "t=check&repair=true")
3285 def _got_html_good(res):
3286 self.failUnless("Healthy" in res, res)
3287 self.failIf("Not Healthy" in res, res)
3288 self.failUnless("No repair necessary" in res, res)
3289 d.addCallback(_got_html_good)
3291 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
3292 def _got_html_sick(res):
3293 self.failUnless("Healthy : healthy" in res, res)
3294 self.failIf("Not Healthy" in res, res)
3295 self.failUnless("Repair successful" in res, res)
3296 d.addCallback(_got_html_sick)
3298 # repair of a dead file will fail, of course, but it isn't yet
3299 # clear how this should be reported. Right now it shows up as
3302 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
3303 #def _got_html_dead(res):
3305 # self.failUnless("Healthy : healthy" in res, res)
3306 # self.failIf("Not Healthy" in res, res)
3307 # self.failUnless("No repair necessary" in res, res)
3308 #d.addCallback(_got_html_dead)
3310 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
3311 def _got_html_corrupt(res):
3312 self.failUnless("Healthy : Healthy" in res, res)
3313 self.failIf("Not Healthy" in res, res)
3314 self.failUnless("Repair successful" in res, res)
3315 d.addCallback(_got_html_corrupt)
3317 d.addErrback(self.explain_web_error)
3320 def test_repair_json(self):
3321 self.basedir = "web/Grid/repair_json"
3323 c0 = self.g.clients[0]
3326 d = c0.upload(upload.Data(DATA+"1", convergence=""))
3327 def _stash_uri(ur, which):
3328 self.uris[which] = ur.uri
3329 d.addCallback(_stash_uri, "sick")
3331 def _compute_fileurls(ignored):
3333 for which in self.uris:
3334 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3335 d.addCallback(_compute_fileurls)
3337 def _clobber_shares(ignored):
3338 sick_shares = self.find_shares(self.uris["sick"])
3339 os.unlink(sick_shares[0][2])
3340 d.addCallback(_clobber_shares)
3342 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
3343 def _got_json_sick(res):
3344 r = simplejson.loads(res)
3345 self.failUnlessEqual(r["repair-attempted"], True)
3346 self.failUnlessEqual(r["repair-successful"], True)
3347 self.failUnlessEqual(r["pre-repair-results"]["summary"],
3348 "Not Healthy: 9 shares (enc 3-of-10)")
3349 self.failIf(r["pre-repair-results"]["results"]["healthy"])
3350 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
3351 self.failUnless(r["post-repair-results"]["results"]["healthy"])
3352 d.addCallback(_got_json_sick)
3354 d.addErrback(self.explain_web_error)
3357 def test_unknown(self, immutable=False):
3358 self.basedir = "web/Grid/unknown"
3360 self.basedir = "web/Grid/unknown-immutable"
3363 c0 = self.g.clients[0]
3367 # the future cap format may contain slashes, which must be tolerated
3368 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
3372 name = u"future-imm"
3373 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
3374 d = c0.create_immutable_dirnode({name: (future_node, {})})
3377 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3378 d = c0.create_dirnode()
3380 def _stash_root_and_create_file(n):
3382 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
3383 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
3385 return self.rootnode.set_node(name, future_node)
3386 d.addCallback(_stash_root_and_create_file)
3388 # make sure directory listing tolerates unknown nodes
3389 d.addCallback(lambda ign: self.GET(self.rooturl))
3390 def _check_directory_html(res, expected_type_suffix):
3391 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
3392 '<td>%s</td>' % (expected_type_suffix, str(name)),
3394 self.failUnless(re.search(pattern, res), res)
3395 # find the More Info link for name, should be relative
3396 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3397 info_url = mo.group(1)
3398 self.failUnlessEqual(info_url, "%s?t=info" % (str(name),))
3400 d.addCallback(_check_directory_html, "-IMM")
3402 d.addCallback(_check_directory_html, "")
3404 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3405 def _check_directory_json(res, expect_rw_uri):
3406 data = simplejson.loads(res)
3407 self.failUnlessEqual(data[0], "dirnode")
3408 f = data[1]["children"][name]
3409 self.failUnlessEqual(f[0], "unknown")
3411 self.failUnlessEqual(f[1]["rw_uri"], unknown_rwcap)
3413 self.failIfIn("rw_uri", f[1])
3415 self.failUnlessEqual(f[1]["ro_uri"], unknown_immcap, data)
3417 self.failUnlessEqual(f[1]["ro_uri"], unknown_rocap)
3418 self.failUnless("metadata" in f[1])
3419 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
3421 def _check_info(res, expect_rw_uri, expect_ro_uri):
3422 self.failUnlessIn("Object Type: <span>unknown</span>", res)
3424 self.failUnlessIn(unknown_rwcap, res)
3427 self.failUnlessIn(unknown_immcap, res)
3429 self.failUnlessIn(unknown_rocap, res)
3431 self.failIfIn(unknown_rocap, res)
3432 self.failIfIn("Raw data as", res)
3433 self.failIfIn("Directory writecap", res)
3434 self.failIfIn("Checker Operations", res)
3435 self.failIfIn("Mutable File Operations", res)
3436 self.failIfIn("Directory Operations", res)
3438 # FIXME: these should have expect_rw_uri=not immutable; I don't know
3439 # why they fail. Possibly related to ticket #922.
3441 d.addCallback(lambda ign: self.GET(expected_info_url))
3442 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
3443 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
3444 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
3446 def _check_json(res, expect_rw_uri):
3447 data = simplejson.loads(res)
3448 self.failUnlessEqual(data[0], "unknown")
3450 self.failUnlessEqual(data[1]["rw_uri"], unknown_rwcap)
3452 self.failIfIn("rw_uri", data[1])
3455 self.failUnlessEqual(data[1]["ro_uri"], unknown_immcap)
3456 self.failUnlessEqual(data[1]["mutable"], False)
3458 self.failUnlessEqual(data[1]["ro_uri"], unknown_rocap)
3459 self.failUnlessEqual(data[1]["mutable"], True)
3461 self.failUnlessEqual(data[1]["ro_uri"], unknown_rocap)
3462 self.failIf("mutable" in data[1], data[1])
3464 # TODO: check metadata contents
3465 self.failUnless("metadata" in data[1])
3467 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
3468 d.addCallback(_check_json, expect_rw_uri=not immutable)
3470 # and make sure that a read-only version of the directory can be
3471 # rendered too. This version will not have unknown_rwcap, whether
3472 # or not future_node was immutable.
3473 d.addCallback(lambda ign: self.GET(self.rourl))
3475 d.addCallback(_check_directory_html, "-IMM")
3477 d.addCallback(_check_directory_html, "-RO")
3479 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
3480 d.addCallback(_check_directory_json, expect_rw_uri=False)
3482 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
3483 d.addCallback(_check_json, expect_rw_uri=False)
3485 # TODO: check that getting t=info from the Info link in the ro directory
3486 # works, and does not include the writecap URI.
3489 def test_immutable_unknown(self):
3490 return self.test_unknown(immutable=True)
3492 def test_mutant_dirnodes_are_omitted(self):
3493 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
3496 c = self.g.clients[0]
3501 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
3502 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
3503 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
3505 # This method tests mainly dirnode, but we'd have to duplicate code in order to
3506 # test the dirnode and web layers separately.
3508 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
3509 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
3510 # When the directory is read, the mutants should be silently disposed of, leaving
3511 # their lonely sibling.
3512 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
3513 # because immutable directories don't have a writecap and therefore that field
3514 # isn't (and can't be) decrypted.
3515 # TODO: The field still exists in the netstring. Technically we should check what
3516 # happens if something is put there (_unpack_contents should raise ValueError),
3517 # but that can wait.
3519 lonely_child = nm.create_from_cap(lonely_uri)
3520 mutant_ro_child = nm.create_from_cap(mut_read_uri)
3521 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
3523 def _by_hook_or_by_crook():
3525 for n in [mutant_ro_child, mutant_write_in_ro_child]:
3526 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
3528 mutant_write_in_ro_child.get_write_uri = lambda: None
3529 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
3531 kids = {u"lonely": (lonely_child, {}),
3532 u"ro": (mutant_ro_child, {}),
3533 u"write-in-ro": (mutant_write_in_ro_child, {}),
3535 d = c.create_immutable_dirnode(kids)
3538 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
3539 self.failIf(dn.is_mutable())
3540 self.failUnless(dn.is_readonly())
3541 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
3542 self.failIf(hasattr(dn._node, 'get_writekey'))
3544 self.failUnless("RO-IMM" in rep)
3546 self.failUnlessIn("CHK", cap.to_string())
3549 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
3550 return download_to_data(dn._node)
3551 d.addCallback(_created)
3553 def _check_data(data):
3554 # Decode the netstring representation of the directory to check that all children
3555 # are present. This is a bit of an abstraction violation, but there's not really
3556 # any other way to do it given that the real DirectoryNode._unpack_contents would
3557 # strip the mutant children out (which is what we're trying to test, later).
3560 while position < len(data):
3561 entries, position = split_netstring(data, 1, position)
3563 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
3564 name = name_utf8.decode("utf-8")
3565 self.failUnless(rwcapdata == "")
3566 self.failUnless(name in kids)
3567 (expected_child, ign) = kids[name]
3568 self.failUnlessEqual(ro_uri, expected_child.get_readonly_uri())
3571 self.failUnlessEqual(numkids, 3)
3572 return self.rootnode.list()
3573 d.addCallback(_check_data)
3575 # Now when we use the real directory listing code, the mutants should be absent.
3576 def _check_kids(children):
3577 self.failUnlessEqual(sorted(children.keys()), [u"lonely"])
3578 lonely_node, lonely_metadata = children[u"lonely"]
3580 self.failUnlessEqual(lonely_node.get_write_uri(), None)
3581 self.failUnlessEqual(lonely_node.get_readonly_uri(), lonely_uri)
3582 d.addCallback(_check_kids)
3584 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
3585 d.addCallback(lambda n: n.list())
3586 d.addCallback(_check_kids) # again with dirnode recreated from cap
3588 # Make sure the lonely child can be listed in HTML...
3589 d.addCallback(lambda ign: self.GET(self.rooturl))
3590 def _check_html(res):
3591 self.failIfIn("URI:SSK", res)
3592 get_lonely = "".join([r'<td>FILE</td>',
3594 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
3596 r'\s+<td>%d</td>' % len("one"),
3598 self.failUnless(re.search(get_lonely, res), res)
3600 # find the More Info link for name, should be relative
3601 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3602 info_url = mo.group(1)
3603 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
3604 d.addCallback(_check_html)
3607 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3608 def _check_json(res):
3609 data = simplejson.loads(res)
3610 self.failUnlessEqual(data[0], "dirnode")
3611 listed_children = data[1]["children"]
3612 self.failUnlessEqual(sorted(listed_children.keys()), [u"lonely"])
3613 ll_type, ll_data = listed_children[u"lonely"]
3614 self.failUnlessEqual(ll_type, "filenode")
3615 self.failIf("rw_uri" in ll_data)
3616 self.failUnlessEqual(ll_data["ro_uri"], lonely_uri)
3617 d.addCallback(_check_json)
# NOTE(review): this excerpt is sampled -- each line keeps its original file
# line number and several intermediate lines are missing, so only comments are
# added here; the code text is untouched.
# Purpose: build a directory containing a healthy file, a literal file, a
# share-damaged ("sick") file and an UnknownNode, then exercise the streaming
# webapi operations t=stream-deep-check and t=stream-manifest; finally make a
# subdirectory unrecoverable and verify both streams emit "ERROR:" lines
# (plus a traceback) instead of further JSON units.
3620 def test_deep_check(self):
3621 self.basedir = "web/Grid/deep_check"
3623 c0 = self.g.clients[0]
3627 d = c0.create_dirnode()
3628 def _stash_root_and_create_file(n):
3630 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3631 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3632 d.addCallback(_stash_root_and_create_file)
3633 def _stash_uri(fn, which):
3634 self.uris[which] = fn.get_uri()
3636 d.addCallback(_stash_uri, "good")
3637 d.addCallback(lambda ign:
3638 self.rootnode.add_file(u"small",
3639 upload.Data("literal",
3641 d.addCallback(_stash_uri, "small")
3642 d.addCallback(lambda ign:
3643 self.rootnode.add_file(u"sick",
3644 upload.Data(DATA+"1",
3646 d.addCallback(_stash_uri, "sick")
3648 # this tests that deep-check and stream-manifest will ignore
3649 # UnknownNode instances. Hopefully this will also cover deep-stats.
3650 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3651 d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
# Damage two shares of "sick" so deep-check sees a degraded (but still
# recoverable) file.
3653 def _clobber_shares(ignored):
3654 self.delete_shares_numbered(self.uris["sick"], [0,1])
3655 d.addCallback(_clobber_shares)
3663 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# The stream is one JSON object per line; the prints below aid debugging when
# a line fails to decode.
3666 units = [simplejson.loads(line)
3667 for line in res.splitlines()
3670 print "response is:", res
3671 print "undecodeable line was '%s'" % line
# 5 nodes (root, good, small, sick, future) plus the trailing stats unit.
3673 self.failUnlessEqual(len(units), 5+1)
3674 # should be parent-first
3676 self.failUnlessEqual(u0["path"], [])
3677 self.failUnlessEqual(u0["type"], "directory")
3678 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3679 u0cr = u0["check-results"]
3680 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
3682 ugood = [u for u in units
3683 if u["type"] == "file" and u["path"] == [u"good"]][0]
3684 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3685 ugoodcr = ugood["check-results"]
3686 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
3689 self.failUnlessEqual(stats["type"], "stats")
# The UnknownNode shows up only in count-unknown, not as a file/dir.
3691 self.failUnlessEqual(s["count-immutable-files"], 2)
3692 self.failUnlessEqual(s["count-literal-files"], 1)
3693 self.failUnlessEqual(s["count-directories"], 1)
3694 self.failUnlessEqual(s["count-unknown"], 1)
3695 d.addCallback(_done)
3697 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3698 def _check_manifest(res):
3699 self.failUnless(res.endswith("\n"))
3700 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3701 self.failUnlessEqual(len(units), 5+1)
3702 self.failUnlessEqual(units[-1]["type"], "stats")
3704 self.failUnlessEqual(first["path"], [])
3705 self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
3706 self.failUnlessEqual(first["type"], "directory")
3707 stats = units[-1]["stats"]
3708 self.failUnlessEqual(stats["count-immutable-files"], 2)
3709 self.failUnlessEqual(stats["count-literal-files"], 1)
3710 self.failUnlessEqual(stats["count-mutable-files"], 0)
3711 self.failUnlessEqual(stats["count-immutable-files"], 2)
3712 self.failUnlessEqual(stats["count-unknown"], 1)
3713 d.addCallback(_check_manifest)
3715 # now add root/subdir and root/subdir/grandchild, then make subdir
3716 # unrecoverable, then see what happens
3718 d.addCallback(lambda ign:
3719 self.rootnode.create_subdirectory(u"subdir"))
3720 d.addCallback(_stash_uri, "subdir")
3721 d.addCallback(lambda subdir_node:
3722 subdir_node.add_file(u"grandchild",
3723 upload.Data(DATA+"2",
3725 d.addCallback(_stash_uri, "grandchild")
3727 d.addCallback(lambda ign:
3728 self.delete_shares_numbered(self.uris["subdir"],
3736 # root/subdir [unrecoverable]
3737 # root/subdir/grandchild
3739 # how should a streaming-JSON API indicate fatal error?
3740 # answer: emit ERROR: instead of a JSON string
3742 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3743 def _check_broken_manifest(res):
3744 lines = res.splitlines()
3746 for (i,line) in enumerate(lines)
3747 if line.startswith("ERROR:")]
3749 self.fail("no ERROR: in output: %s" % (res,))
3750 first_error = error_lines[0]
3751 error_line = lines[first_error]
3752 error_msg = lines[first_error+1:]
3753 error_msg_s = "\n".join(error_msg) + "\n"
3754 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3756 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
# Everything streamed before the ERROR: line must still be valid JSON.
3757 units = [simplejson.loads(line) for line in lines[:first_error]]
3758 self.failUnlessEqual(len(units), 6) # includes subdir
3759 last_unit = units[-1]
3760 self.failUnlessEqual(last_unit["path"], ["subdir"])
3761 d.addCallback(_check_broken_manifest)
3763 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# Same ERROR:-line contract for deep-check as for manifest, plus the partial
# check-results emitted for the unrecoverable subdir itself.
3764 def _check_broken_deepcheck(res):
3765 lines = res.splitlines()
3767 for (i,line) in enumerate(lines)
3768 if line.startswith("ERROR:")]
3770 self.fail("no ERROR: in output: %s" % (res,))
3771 first_error = error_lines[0]
3772 error_line = lines[first_error]
3773 error_msg = lines[first_error+1:]
3774 error_msg_s = "\n".join(error_msg) + "\n"
3775 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3777 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3778 units = [simplejson.loads(line) for line in lines[:first_error]]
3779 self.failUnlessEqual(len(units), 6) # includes subdir
3780 last_unit = units[-1]
3781 self.failUnlessEqual(last_unit["path"], ["subdir"])
3782 r = last_unit["check-results"]["results"]
3783 self.failUnlessEqual(r["count-recoverable-versions"], 0)
3784 self.failUnlessEqual(r["count-shares-good"], 1)
3785 self.failUnlessEqual(r["recoverable"], False)
3786 d.addCallback(_check_broken_deepcheck)
3788 d.addErrback(self.explain_web_error)
# NOTE(review): sampled excerpt -- original line numbers retained, some lines
# missing; comments only, code untouched.
# Purpose: exercise t=stream-deep-check&repair=true over a tree with a healthy
# file, a literal file, and a "sick" file missing one share; the stream must
# report repair-attempted only for the sick file and show its share count
# going from 9 back to 10. The commented-out "dead"/"corrupt" cases are
# intentionally disabled in the original.
3791 def test_deep_check_and_repair(self):
3792 self.basedir = "web/Grid/deep_check_and_repair"
3794 c0 = self.g.clients[0]
3798 d = c0.create_dirnode()
3799 def _stash_root_and_create_file(n):
3801 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3802 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3803 d.addCallback(_stash_root_and_create_file)
3804 def _stash_uri(fn, which):
3805 self.uris[which] = fn.get_uri()
3806 d.addCallback(_stash_uri, "good")
3807 d.addCallback(lambda ign:
3808 self.rootnode.add_file(u"small",
3809 upload.Data("literal",
3811 d.addCallback(_stash_uri, "small")
3812 d.addCallback(lambda ign:
3813 self.rootnode.add_file(u"sick",
3814 upload.Data(DATA+"1",
3816 d.addCallback(_stash_uri, "sick")
3817 #d.addCallback(lambda ign:
3818 # self.rootnode.add_file(u"dead",
3819 # upload.Data(DATA+"2",
3821 #d.addCallback(_stash_uri, "dead")
3823 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3824 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3825 #d.addCallback(_stash_uri, "corrupt")
# Remove exactly one share of "sick" so it is degraded but repairable.
3827 def _clobber_shares(ignored):
3828 good_shares = self.find_shares(self.uris["good"])
3829 self.failUnlessEqual(len(good_shares), 10)
3830 sick_shares = self.find_shares(self.uris["sick"])
3831 os.unlink(sick_shares[0][2])
3832 #dead_shares = self.find_shares(self.uris["dead"])
3833 #for i in range(1, 10):
3834 # os.unlink(dead_shares[i][2])
3836 #c_shares = self.find_shares(self.uris["corrupt"])
3837 #cso = CorruptShareOptions()
3838 #cso.stdout = StringIO()
3839 #cso.parseOptions([c_shares[0][2]])
3841 d.addCallback(_clobber_shares)
3844 # root/good CHK, 10 shares
3846 # root/sick CHK, 9 shares
3848 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
3850 units = [simplejson.loads(line)
3851 for line in res.splitlines()
# 4 nodes (root, good, small, sick) plus the trailing stats unit.
3853 self.failUnlessEqual(len(units), 4+1)
3854 # should be parent-first
3856 self.failUnlessEqual(u0["path"], [])
3857 self.failUnlessEqual(u0["type"], "directory")
3858 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3859 u0crr = u0["check-and-repair-results"]
3860 self.failUnlessEqual(u0crr["repair-attempted"], False)
3861 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3863 ugood = [u for u in units
3864 if u["type"] == "file" and u["path"] == [u"good"]][0]
3865 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3866 ugoodcrr = ugood["check-and-repair-results"]
3867 self.failUnlessEqual(ugoodcrr["repair-attempted"], False)
3868 self.failUnlessEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
# Only the sick file should trigger an (successful) repair: 9 -> 10 shares.
3870 usick = [u for u in units
3871 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3872 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3873 usickcrr = usick["check-and-repair-results"]
3874 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3875 self.failUnlessEqual(usickcrr["repair-successful"], True)
3876 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3877 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3880 self.failUnlessEqual(stats["type"], "stats")
3882 self.failUnlessEqual(s["count-immutable-files"], 2)
3883 self.failUnlessEqual(s["count-literal-files"], 1)
3884 self.failUnlessEqual(s["count-directories"], 1)
3885 d.addCallback(_done)
3887 d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    """Return a list of (share_filename, num_leases) for every share of
    self.uris[which].

    'ignored' absorbs the previous Deferred result so this can be chained
    directly with d.addCallback(self._count_leases, which); the returned
    list is consumed by _assert_leasecount.
    """
    u = self.uris[which]
    shares = self.find_shares(u)
    # Defect in the visible source: lease_counts was appended to without
    # ever being initialized, and the accumulated list was never returned
    # (so _assert_leasecount would have iterated None).
    lease_counts = []
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        num_leases = len(list(sf.get_leases()))
        lease_counts.append((fn, num_leases))
    return lease_counts
3900 def _assert_leasecount(self, lease_counts, expected):
3901 for (fn, num_leases) in lease_counts:
3902 if num_leases != expected:
3903 self.fail("expected %d leases, have %d, on %s" %
3904 (expected, num_leases, fn))
# NOTE(review): sampled excerpt -- original line numbers retained, some lines
# missing; comments only, code untouched.
# Purpose: verify the t=check&add-lease=true webapi operation. A plain check
# adds no lease; add-lease from the original client merely renews the existing
# lease (count stays 1); add-lease from a second client (different lease
# secrets) adds a second lease, and only on the object being checked.
3906 def test_add_lease(self):
3907 self.basedir = "web/Grid/add_lease"
3908 self.set_up_grid(num_clients=2)
3909 c0 = self.g.clients[0]
3912 d = c0.upload(upload.Data(DATA, convergence=""))
3913 def _stash_uri(ur, which):
3914 self.uris[which] = ur.uri
3915 d.addCallback(_stash_uri, "one")
3916 d.addCallback(lambda ign:
3917 c0.upload(upload.Data(DATA+"1", convergence="")))
3918 d.addCallback(_stash_uri, "two")
3919 def _stash_mutable_uri(n, which):
3920 self.uris[which] = n.get_uri()
3921 assert isinstance(self.uris[which], str)
3922 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3923 d.addCallback(_stash_mutable_uri, "mutable")
3925 def _compute_fileurls(ignored):
3927 for which in self.uris:
3928 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3929 d.addCallback(_compute_fileurls)
# Baseline: each freshly-uploaded object starts with exactly one lease.
3931 d.addCallback(self._count_leases, "one")
3932 d.addCallback(self._assert_leasecount, 1)
3933 d.addCallback(self._count_leases, "two")
3934 d.addCallback(self._assert_leasecount, 1)
3935 d.addCallback(self._count_leases, "mutable")
3936 d.addCallback(self._assert_leasecount, 1)
3938 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3939 def _got_html_good(res):
3940 self.failUnless("Healthy" in res, res)
3941 self.failIf("Not Healthy" in res, res)
3942 d.addCallback(_got_html_good)
# A plain check must not change any lease counts.
3944 d.addCallback(self._count_leases, "one")
3945 d.addCallback(self._assert_leasecount, 1)
3946 d.addCallback(self._count_leases, "two")
3947 d.addCallback(self._assert_leasecount, 1)
3948 d.addCallback(self._count_leases, "mutable")
3949 d.addCallback(self._assert_leasecount, 1)
3951 # this CHECK uses the original client, which uses the same
3952 # lease-secrets, so it will just renew the original lease
3953 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3954 d.addCallback(_got_html_good)
3956 d.addCallback(self._count_leases, "one")
3957 d.addCallback(self._assert_leasecount, 1)
3958 d.addCallback(self._count_leases, "two")
3959 d.addCallback(self._assert_leasecount, 1)
3960 d.addCallback(self._count_leases, "mutable")
3961 d.addCallback(self._assert_leasecount, 1)
3963 # this CHECK uses an alternate client, which adds a second lease
3964 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3965 d.addCallback(_got_html_good)
# Only "one" gains a lease; "two" and "mutable" are untouched.
3967 d.addCallback(self._count_leases, "one")
3968 d.addCallback(self._assert_leasecount, 2)
3969 d.addCallback(self._count_leases, "two")
3970 d.addCallback(self._assert_leasecount, 1)
3971 d.addCallback(self._count_leases, "mutable")
3972 d.addCallback(self._assert_leasecount, 1)
3974 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3975 d.addCallback(_got_html_good)
3977 d.addCallback(self._count_leases, "one")
3978 d.addCallback(self._assert_leasecount, 2)
3979 d.addCallback(self._count_leases, "two")
3980 d.addCallback(self._assert_leasecount, 1)
3981 d.addCallback(self._count_leases, "mutable")
3982 d.addCallback(self._assert_leasecount, 1)
3984 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3986 d.addCallback(_got_html_good)
# Second-client add-lease on the mutable file bumps it to 2 as well.
3988 d.addCallback(self._count_leases, "one")
3989 d.addCallback(self._assert_leasecount, 2)
3990 d.addCallback(self._count_leases, "two")
3991 d.addCallback(self._assert_leasecount, 1)
3992 d.addCallback(self._count_leases, "mutable")
3993 d.addCallback(self._assert_leasecount, 2)
3995 d.addErrback(self.explain_web_error)
# NOTE(review): sampled excerpt -- original line numbers retained, some lines
# missing; comments only, code untouched.
# Purpose: verify t=stream-deep-check&add-lease=true. A deep-check with
# add-lease from the original client only renews existing leases (all counts
# stay 1); from a second client it adds a new lease to every node in the
# tree (all counts become 2).
3998 def test_deep_add_lease(self):
3999 self.basedir = "web/Grid/deep_add_lease"
4000 self.set_up_grid(num_clients=2)
4001 c0 = self.g.clients[0]
4005 d = c0.create_dirnode()
4006 def _stash_root_and_create_file(n):
4008 self.uris["root"] = n.get_uri()
4009 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4010 return n.add_file(u"one", upload.Data(DATA, convergence=""))
4011 d.addCallback(_stash_root_and_create_file)
4012 def _stash_uri(fn, which):
4013 self.uris[which] = fn.get_uri()
4014 d.addCallback(_stash_uri, "one")
4015 d.addCallback(lambda ign:
4016 self.rootnode.add_file(u"small",
4017 upload.Data("literal",
4019 d.addCallback(_stash_uri, "small")
4021 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
4022 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
4023 d.addCallback(_stash_uri, "mutable")
4025 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
4027 units = [simplejson.loads(line)
4028 for line in res.splitlines()
4030 # root, one, small, mutable, stats
4031 self.failUnlessEqual(len(units), 4+1)
4032 d.addCallback(_done)
# Baseline: one lease on every node after upload.
4034 d.addCallback(self._count_leases, "root")
4035 d.addCallback(self._assert_leasecount, 1)
4036 d.addCallback(self._count_leases, "one")
4037 d.addCallback(self._assert_leasecount, 1)
4038 d.addCallback(self._count_leases, "mutable")
4039 d.addCallback(self._assert_leasecount, 1)
4041 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
4042 d.addCallback(_done)
# Same client, same lease secrets: add-lease only renews, counts stay 1.
4044 d.addCallback(self._count_leases, "root")
4045 d.addCallback(self._assert_leasecount, 1)
4046 d.addCallback(self._count_leases, "one")
4047 d.addCallback(self._assert_leasecount, 1)
4048 d.addCallback(self._count_leases, "mutable")
4049 d.addCallback(self._assert_leasecount, 1)
4051 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
4053 d.addCallback(_done)
# Alternate client adds a second lease to every node in the tree.
4055 d.addCallback(self._count_leases, "root")
4056 d.addCallback(self._assert_leasecount, 2)
4057 d.addCallback(self._count_leases, "one")
4058 d.addCallback(self._assert_leasecount, 2)
4059 d.addCallback(self._count_leases, "mutable")
4060 d.addCallback(self._assert_leasecount, 2)
4062 d.addErrback(self.explain_web_error)
# NOTE(review): sampled excerpt -- original line numbers retained, some lines
# missing; comments only, code untouched.
# Purpose: verify webapi error reporting. Unrecoverable immutable files must
# yield 410 Gone with a text/plain explanation (NoSharesError /
# NotEnoughSharesError), missing children yield 404, unrecoverable
# directories render a degraded HTML page (or 410 for ?t=json), and an
# unexpected server-side exception yields a 500 whose body format (HTML vs
# plain traceback) follows the request's Accept header.
4066 def test_exceptions(self):
4067 self.basedir = "web/Grid/exceptions"
4068 self.set_up_grid(num_clients=1, num_servers=2)
4069 c0 = self.g.clients[0]
4070 c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
4073 d = c0.create_dirnode()
4075 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4076 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
4078 d.addCallback(_stash_root)
4079 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# "1share": delete shares 1-9 so exactly one remains (need 3 to decode).
# "0shares": flip a key bit to fabricate a URI with no shares at all.
4081 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
4082 self.delete_shares_numbered(ur.uri, range(1,10))
4084 u = uri.from_string(ur.uri)
4085 u.key = testutil.flip_bit(u.key, 0)
4086 baduri = u.to_string()
4087 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
4088 d.addCallback(_stash_bad)
4089 d.addCallback(lambda ign: c0.create_dirnode())
4090 def _mangle_dirnode_1share(n):
4092 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
4093 self.fileurls["dir-1share-json"] = url + "?t=json"
4094 self.delete_shares_numbered(u, range(1,10))
4095 d.addCallback(_mangle_dirnode_1share)
4096 d.addCallback(lambda ign: c0.create_dirnode())
4097 def _mangle_dirnode_0share(n):
4099 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
4100 self.fileurls["dir-0share-json"] = url + "?t=json"
4101 self.delete_shares_numbered(u, range(0,10))
4102 d.addCallback(_mangle_dirnode_0share)
4104 # NotEnoughSharesError should be reported sensibly, with a
4105 # text/plain explanation of the problem, and perhaps some
4106 # information on which shares *could* be found.
4108 d.addCallback(lambda ignored:
4109 self.shouldHTTPError("GET unrecoverable",
4110 410, "Gone", "NoSharesError",
4111 self.GET, self.fileurls["0shares"]))
4112 def _check_zero_shares(body):
4113 self.failIf("<html>" in body, body)
4114 body = " ".join(body.strip().split())
4115 exp = ("NoSharesError: no shares could be found. "
4116 "Zero shares usually indicates a corrupt URI, or that "
4117 "no servers were connected, but it might also indicate "
4118 "severe corruption. You should perform a filecheck on "
4119 "this object to learn more. The full error message is: "
4120 "Failed to get enough shareholders: have 0, need 3")
4121 self.failUnlessEqual(exp, body)
4122 d.addCallback(_check_zero_shares)
4125 d.addCallback(lambda ignored:
4126 self.shouldHTTPError("GET 1share",
4127 410, "Gone", "NotEnoughSharesError",
4128 self.GET, self.fileurls["1share"]))
4129 def _check_one_share(body):
4130 self.failIf("<html>" in body, body)
4131 body = " ".join(body.strip().split())
4132 exp = ("NotEnoughSharesError: This indicates that some "
4133 "servers were unavailable, or that shares have been "
4134 "lost to server departure, hard drive failure, or disk "
4135 "corruption. You should perform a filecheck on "
4136 "this object to learn more. The full error message is:"
4137 " Failed to get enough shareholders: have 1, need 3")
4138 self.failUnlessEqual(exp, body)
4139 d.addCallback(_check_one_share)
4141 d.addCallback(lambda ignored:
4142 self.shouldHTTPError("GET imaginary",
4143 404, "Not Found", None,
4144 self.GET, self.fileurls["imaginary"]))
4145 def _missing_child(body):
4146 self.failUnless("No such child: imaginary" in body, body)
4147 d.addCallback(_missing_child)
# Unrecoverable *directories* still render HTML (200), but degraded: an
# explanation instead of the child table, and no upload forms.
4149 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
4150 def _check_0shares_dir_html(body):
4151 self.failUnless("<html>" in body, body)
4152 # we should see the regular page, but without the child table or
4154 body = " ".join(body.strip().split())
4155 self.failUnlessIn('href="?t=info">More info on this directory',
4157 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4158 "could not be retrieved, because there were insufficient "
4159 "good shares. This might indicate that no servers were "
4160 "connected, insufficient servers were connected, the URI "
4161 "was corrupt, or that shares have been lost due to server "
4162 "departure, hard drive failure, or disk corruption. You "
4163 "should perform a filecheck on this object to learn more.")
4164 self.failUnlessIn(exp, body)
4165 self.failUnlessIn("No upload forms: directory is unreadable", body)
4166 d.addCallback(_check_0shares_dir_html)
4168 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
4169 def _check_1shares_dir_html(body):
4170 # at some point, we'll split UnrecoverableFileError into 0-shares
4171 # and some-shares like we did for immutable files (since there
4172 # are different sorts of advice to offer in each case). For now,
4173 # they present the same way.
4174 self.failUnless("<html>" in body, body)
4175 body = " ".join(body.strip().split())
4176 self.failUnlessIn('href="?t=info">More info on this directory',
4178 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4179 "could not be retrieved, because there were insufficient "
4180 "good shares. This might indicate that no servers were "
4181 "connected, insufficient servers were connected, the URI "
4182 "was corrupt, or that shares have been lost due to server "
4183 "departure, hard drive failure, or disk corruption. You "
4184 "should perform a filecheck on this object to learn more.")
4185 self.failUnlessIn(exp, body)
4186 self.failUnlessIn("No upload forms: directory is unreadable", body)
4187 d.addCallback(_check_1shares_dir_html)
# ?t=json on the same directories gets the machine-readable 410 instead.
4189 d.addCallback(lambda ignored:
4190 self.shouldHTTPError("GET dir-0share-json",
4191 410, "Gone", "UnrecoverableFileError",
4193 self.fileurls["dir-0share-json"]))
4194 def _check_unrecoverable_file(body):
4195 self.failIf("<html>" in body, body)
4196 body = " ".join(body.strip().split())
4197 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4198 "could not be retrieved, because there were insufficient "
4199 "good shares. This might indicate that no servers were "
4200 "connected, insufficient servers were connected, the URI "
4201 "was corrupt, or that shares have been lost due to server "
4202 "departure, hard drive failure, or disk corruption. You "
4203 "should perform a filecheck on this object to learn more.")
4204 self.failUnlessEqual(exp, body)
4205 d.addCallback(_check_unrecoverable_file)
4207 d.addCallback(lambda ignored:
4208 self.shouldHTTPError("GET dir-1share-json",
4209 410, "Gone", "UnrecoverableFileError",
4211 self.fileurls["dir-1share-json"]))
4212 d.addCallback(_check_unrecoverable_file)
4214 d.addCallback(lambda ignored:
4215 self.shouldHTTPError("GET imaginary",
4216 404, "Not Found", None,
4217 self.GET, self.fileurls["imaginary"]))
4219 # attach a webapi child that throws a random error, to test how it
4221 w = c0.getServiceNamed("webish")
4222 w.root.putChild("ERRORBOOM", ErrorBoom())
4224 # "Accept: */*" : should get a text/html stack trace
4225 # "Accept: text/plain" : should get a text/plain stack trace
4226 # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
4227 # no Accept header: should get a text/html stack trace
4229 d.addCallback(lambda ignored:
4230 self.shouldHTTPError("GET errorboom_html",
4231 500, "Internal Server Error", None,
4232 self.GET, "ERRORBOOM",
4233 headers={"accept": ["*/*"]}))
4234 def _internal_error_html1(body):
4235 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4236 d.addCallback(_internal_error_html1)
4238 d.addCallback(lambda ignored:
4239 self.shouldHTTPError("GET errorboom_text",
4240 500, "Internal Server Error", None,
4241 self.GET, "ERRORBOOM",
4242 headers={"accept": ["text/plain"]}))
4243 def _internal_error_text2(body):
4244 self.failIf("<html>" in body, body)
4245 self.failUnless(body.startswith("Traceback "), body)
4246 d.addCallback(_internal_error_text2)
4248 CLI_accepts = "text/plain, application/octet-stream"
4249 d.addCallback(lambda ignored:
4250 self.shouldHTTPError("GET errorboom_text",
4251 500, "Internal Server Error", None,
4252 self.GET, "ERRORBOOM",
4253 headers={"accept": [CLI_accepts]}))
4254 def _internal_error_text3(body):
4255 self.failIf("<html>" in body, body)
4256 self.failUnless(body.startswith("Traceback "), body)
4257 d.addCallback(_internal_error_text3)
4259 d.addCallback(lambda ignored:
4260 self.shouldHTTPError("GET errorboom_text",
4261 500, "Internal Server Error", None,
4262 self.GET, "ERRORBOOM"))
4263 def _internal_error_html4(body):
4264 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4265 d.addCallback(_internal_error_html4)
4267 def _flush_errors(res):
4268 # Trial: please ignore the CompletelyUnhandledError in the logs
4269 self.flushLoggedErrors(CompletelyUnhandledError)
4271 d.addBoth(_flush_errors)
# Marker exception raised by ErrorBoom below; test_exceptions flushes it
# from trial's logged errors. (Its 'pass' body line is missing from this
# sampled excerpt.)
4275 class CompletelyUnhandledError(Exception):
class ErrorBoom(rend.Page):
    """Nevow page that always explodes, so tests can observe how the
    webapi reports an unexpected server-side exception (500 + stack
    trace in HTML or text/plain, depending on the Accept header)."""
    def beforeRender(self, ctx):
        # Blow up before any rendering happens; the message text must
        # stay exactly "whoops" (tests may inspect the traceback body).
        boom = CompletelyUnhandledError("whoops")
        raise boom