1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.internet.task import Clock
8 from twisted.web import client, error, http
9 from twisted.python import failure, log
10 from nevow import rend
11 from allmydata import interfaces, uri, webish, dirnode
12 from allmydata.storage.shares import get_share_file
13 from allmydata.storage_client import StorageFarmBroker
14 from allmydata.immutable import upload, download
15 from allmydata.dirnode import DirectoryNode
16 from allmydata.nodemaker import NodeMaker
17 from allmydata.unknown import UnknownNode
18 from allmydata.web import status, common
19 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
20 from allmydata.util import fileutil, base32
21 from allmydata.util.consumer import download_to_data
22 from allmydata.util.netstring import split_netstring
23 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
24 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
25 from allmydata.interfaces import IMutableFileNode
26 from allmydata.mutable import servermap, publish, retrieve
27 import common_util as testutil
28 from allmydata.test.no_network import GridTestMixin
29 from allmydata.test.common_web import HTTPClientGETFactory, \
31 from allmydata.client import Client, SecretHolder
33 # create a fake uploader/downloader, and a couple of fake dirnodes, then
34 # create a webserver that works against them
# Module-wide trial timeout (seconds) applied to every TestCase below.
36 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
# Minimal stand-in for the node's stats-provider service: exposes empty
# 'stats' and 'counters' dicts so status/welcome-page rendering has data.
# NOTE(review): inner line 39 is missing from this listing -- presumably a
# 'def get_stats(self):' header; confirm against the original file.
38 class FakeStatsProvider:
40 stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker double: every cap resolves to an in-memory fake node.

    LIT and CHK caps both become FakeCHKFileNode instances; mutable caps
    become FakeMutableFileNode instances, so no storage servers are needed.
    """

    def _create_lit(self, cap):
        # literal caps are served by the same fake immutable node type
        return FakeCHKFileNode(cap)

    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)

    def _create_mutable(self, cap):
        return FakeMutableFileNode(None, None, None, None).init_from_cap(cap)

    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for interface compatibility but unused here
        node = FakeMutableFileNode(None, None, None, None)
        return node.create(contents)
# In-process uploader double: reads the whole uploadable and stores it in a
# fake CHK filenode instead of contacting storage servers.
54 class FakeUploader(service.Service):
56 def upload(self, uploadable, history=None):
57 d = uploadable.get_size()
58 d.addCallback(lambda size: uploadable.read(size))
# NOTE(review): inner lines 59-60 are missing from this listing -- the
# 'def _got_data(data):' callback header (and likely a join of the read
# chunks) belongs here; confirm against the original file.
61 n = create_chk_filenode(data)
62 results = upload.UploadResults()
63 results.uri = n.get_uri()
# (inner 64 missing -- presumably 'return results')
65 d.addCallback(_got_data)
# (inner 66 missing -- presumably 'return d')
67 def get_helper_info(self):
# Class-level singleton status lists of the (unseen in this listing) fake
# history class: one pre-made status object per operation type, shared by
# every instance, so the /status pages always have exactly one entry each.
71 _all_upload_status = [upload.UploadStatus()]
72 _all_download_status = [download.DownloadStatus()]
73 _all_mapupdate_statuses = [servermap.UpdateStatus()]
74 _all_publish_statuses = [publish.PublishStatus()]
75 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
def list_all_upload_statuses(self):
    """Return the shared list holding the single fake UploadStatus."""
    return self._all_upload_status
def list_all_download_statuses(self):
    """Return the shared list holding the single fake DownloadStatus."""
    return self._all_download_status
def list_all_mapupdate_statuses(self):
    """Return the shared list holding the single fake UpdateStatus."""
    return self._all_mapupdate_statuses
def list_all_publish_statuses(self):
    """Return the shared list holding the single fake PublishStatus."""
    return self._all_publish_statuses
def list_all_retrieve_statuses(self):
    """Return the shared list holding the single fake RetrieveStatus."""
    return self._all_retrieve_statuses
87 def list_all_helper_statuses(self):
# Client double that wires all the fakes above together. It deliberately
# skips Client.__init__ (which would read config files and contact an
# introducer) and instead hand-assigns just the attributes the web frontend
# reads.
90 class FakeClient(Client):
# NOTE(review): inner line 91 is missing from this listing -- presumably
# the 'def __init__(self):' header; confirm against the original file.
92 # don't upcall to Client.__init__, since we only want to initialize a
94 service.MultiService.__init__(self)
95 self.nodeid = "fake_nodeid"
96 self.nickname = "fake_nickname"
97 self.introducer_furl = "None"
98 self.stats_provider = FakeStatsProvider()
99 self._secret_holder = SecretHolder("lease secret", "convergence secret")
101 self.convergence = "some random string"
# broker with no tub: peer selection works, but no real connections
102 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
103 self.introducer_client = None
104 self.history = FakeHistory()
105 self.uploader = FakeUploader()
106 self.uploader.setServiceParent(self)
107 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
108 self.uploader, None, None,
def startService(self):
    # No fake-specific startup work: defer straight to MultiService.
    return service.MultiService.startService(self)
def stopService(self):
    # No fake-specific shutdown work: defer straight to MultiService.
    return service.MultiService.stopService(self)
# Re-export of the fake mutable node's size cap so tests can reach it via
# the client object. NOTE(review): indentation is lost in this listing --
# presumably a class attribute of FakeClient (self.s.MUTABLE_SIZELIMIT is
# used below); confirm against the original file.
116 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
# Test fixture mixin: starts a FakeClient plus a real WebishServer on an
# ephemeral port, then builds a small directory tree (public/foo/bar.txt,
# sub/, empty/, a blocking file, a unicode-named child, and a read-only
# directory) for the HTTP tests below to poke at.
# NOTE(review): this listing is sampled -- the 'def setUp(self):' header
# (inner 119) and several interior lines are missing; code below is kept
# byte-identical rather than reconstructed.
118 class WebMixin(object):
120 self.s = FakeClient()
121 self.s.startService()
122 self.staticdir = self.mktemp()
124 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
126 self.ws.setServiceParent(self.s)
# port "0" above means the OS picks a free port; recover it here
127 self.webish_port = port = self.ws.listener._port.getHost().port
128 self.webish_url = "http://localhost:%d" % port
# create six fake dirnodes up front; res[i][1] below indexes into them
130 l = [ self.s.create_dirnode() for x in range(6) ]
131 d = defer.DeferredList(l)
133 self.public_root = res[0][1]
134 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
135 self.public_url = "/uri/" + self.public_root.get_uri()
136 self.private_root = res[1][1]
140 self._foo_uri = foo.get_uri()
141 self._foo_readonly_uri = foo.get_readonly_uri()
142 self._foo_verifycap = foo.get_verify_cap().to_string()
143 # NOTE: we ignore the deferred on all set_uri() calls, because we
144 # know the fake nodes do these synchronously
145 self.public_root.set_uri(u"foo", foo.get_uri(),
146 foo.get_readonly_uri())
148 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
149 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
150 self._bar_txt_verifycap = n.get_verify_cap().to_string()
152 foo.set_uri(u"empty", res[3][1].get_uri(),
153 res[3][1].get_readonly_uri())
154 sub_uri = res[4][1].get_uri()
155 self._sub_uri = sub_uri
156 foo.set_uri(u"sub", sub_uri, sub_uri)
157 sub = self.s.create_node_from_uri(sub_uri)
159 _ign, n, blocking_uri = self.makefile(1)
160 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
162 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
163 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
164 # still think of it as an umlaut
165 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
167 _ign, n, baz_file = self.makefile(2)
168 self._baz_file_uri = baz_file
169 sub.set_uri(u"baz.txt", baz_file, baz_file)
171 _ign, n, self._bad_file_uri = self.makefile(3)
172 # this uri should not be downloadable
173 del FakeCHKFileNode.all_contents[self._bad_file_uri]
176 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
177 rodir.get_readonly_uri())
178 rodir.set_uri(u"nor", baz_file, baz_file)
183 # public/foo/blockingfile
186 # public/foo/sub/baz.txt
188 # public/reedownlee/nor
189 self.NEWFILE_CONTENTS = "newfile contents\n"
# capture bar.txt's metadata so JSON-rendering tests can compare ctime
191 return foo.get_metadata_for(u"bar.txt")
193 def _got_metadata(metadata):
194 self._bar_txt_metadata = metadata
195 d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a numbered fake CHK file; return (contents, node, uri)."""
    contents = "contents of file %s\n" % (number,)
    filenode = create_chk_filenode(contents)
    return contents, filenode, filenode.get_uri()
204 return self.s.stopService()
def failUnlessIsBarDotTxt(self, res):
    """Assert that a downloaded body matches bar.txt's known contents."""
    self.failUnlessEqual(res, self.BAR_CONTENTS, res)
def failUnlessIsBarJSON(self, res):
    """Assert that res is the t=json rendering of the immutable bar.txt."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], u"filenode")
    self.failUnless(isinstance(parsed[1], dict))
    info = parsed[1]
    self.failIf(info["mutable"])
    self.failIf("rw_uri" in info) # immutable files expose no write cap
    self.failUnlessEqual(info["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(info["verify_uri"], self._bar_txt_verifycap)
    self.failUnlessEqual(info["size"], len(self.BAR_CONTENTS))
# Assert that res is the t=json rendering of the mutable 'foo' directory:
# correct caps, the expected five children, and per-child metadata.
# NOTE(review): inner lines 230, 236 and 250 are missing from this listing
# (236 is the 'for (name, value)' part of the dict comprehension, 250 the
# continuation of the final assertion); kept byte-identical, not guessed.
220 def failUnlessIsFooJSON(self, res):
221 data = simplejson.loads(res)
222 self.failUnless(isinstance(data, list))
223 self.failUnlessEqual(data[0], "dirnode", res)
224 self.failUnless(isinstance(data[1], dict))
225 self.failUnless(data[1]["mutable"])
226 self.failUnless("rw_uri" in data[1]) # mutable
227 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
228 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
229 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
231 kidnames = sorted([unicode(n) for n in data[1]["children"]])
232 self.failUnlessEqual(kidnames,
233 [u"bar.txt", u"blockingfile", u"empty",
234 u"n\u00fc.txt", u"sub"])
235 kids = dict( [(unicode(name),value)
237 in data[1]["children"].iteritems()] )
238 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
239 self.failUnless("metadata" in kids[u"sub"][1])
240 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
241 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
242 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
243 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
244 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
245 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
246 self._bar_txt_verifycap)
247 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
248 self._bar_txt_metadata["ctime"])
249 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
# Issue an HTTP GET against the test webserver. Fires with the body, or
# with (body, status, headers) when return_response=True.
# NOTE(review): inner lines 253, 261-262 and 264 are missing from this
# listing (the signature's closing line and the '_got_data' callback
# header); kept byte-identical, not guessed.
252 def GET(self, urlpath, followRedirect=False, return_response=False,
254 # if return_response=True, this fires with (data, statuscode,
255 # respheaders) instead of just data.
# urlpath must be a pre-encoded byte string, never unicode
256 assert not isinstance(urlpath, unicode)
257 url = self.webish_url + urlpath
258 factory = HTTPClientGETFactory(url, method="GET",
259 followRedirect=followRedirect, **kwargs)
260 reactor.connectTCP("localhost", self.webish_port, factory)
263 return (data, factory.status, factory.response_headers)
265 d.addCallback(_got_data)
266 return factory.deferred
# Issue an HTTP HEAD against the test webserver, using the custom factory
# so the response headers are available to assertions.
# NOTE(review): unlike GET above, the factory is given the bare 'urlpath'
# rather than self.webish_url + urlpath -- possibly intentional for the
# HEAD factory, possibly a latent inconsistency; confirm against callers.
# (inner lines 273-274 and 276 are missing from this listing.)
268 def HEAD(self, urlpath, return_response=False, **kwargs):
269 # this requires some surgery, because twisted.web.client doesn't want
270 # to give us back the response headers.
271 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
272 reactor.connectTCP("localhost", self.webish_port, factory)
275 return (data, factory.status, factory.response_headers)
277 d.addCallback(_got_data)
278 return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """HTTP PUT of `data` to `urlpath`; fires with the response body."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="PUT", postdata=data, **kwargs)
def DELETE(self, urlpath):
    """HTTP DELETE of `urlpath`; fires with the response body."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
# Build a multipart/form-data body from keyword fields and POST it via
# POST2. A (filename, value) tuple field becomes a file-upload part.
# NOTE(review): this listing is sampled -- the lines initializing 'form'
# and 'headers', the _charset part's value, the else-branch header, and
# the per-part boundary/value appends (inner 290-292, 294-296, 302, 304,
# 307-308, 310-315) are missing; kept byte-identical, not guessed.
288 def POST(self, urlpath, followRedirect=False, **fields):
# fixed multipart boundary token, matched in the content-type header below
289 sepbase = "boogabooga"
293 form.append('Content-Disposition: form-data; name="_charset"')
297 for name, value in fields.iteritems():
298 if isinstance(value, tuple):
299 filename, value = value
300 form.append('Content-Disposition: form-data; name="%s"; '
301 'filename="%s"' % (name, filename.encode("utf-8")))
303 form.append('Content-Disposition: form-data; name="%s"' % name)
305 if isinstance(value, unicode):
306 value = value.encode("utf-8")
309 assert isinstance(value, str)
316 body = "\r\n".join(form) + "\r\n"
317 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
318 return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """HTTP POST of a pre-built `body` to `urlpath` on the test webserver.

    `headers` is an optional dict of extra request headers. The previous
    signature used a mutable default (`headers={}`), which is shared
    between calls and would leak header mutations across tests if twisted
    (or a caller) ever modified it in place; a None sentinel avoids that
    while remaining backward-compatible for all existing callers.
    Returns the deferred from twisted.web.client.getPage.
    """
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
# Errback-style checker: given the result of a request, assert that it is
# a Failure wrapping expected_failure, optionally containing `substring`
# in its str() and `response_substring` in the HTTP response body.
# NOTE(review): inner lines 329 and 337 are missing from this listing --
# presumably 'if substring:' and the 'else:' that guards self.fail();
# kept byte-identical, not guessed.
325 def shouldFail(self, res, expected_failure, which,
326 substring=None, response_substring=None):
327 if isinstance(res, failure.Failure):
328 res.trap(expected_failure)
330 self.failUnless(substring in str(res),
331 "substring '%s' not in '%s'"
332 % (substring, str(res)))
333 if response_substring:
334 self.failUnless(response_substring in res.value.response,
335 "response substring '%s' not in '%s'"
336 % (response_substring, res.value.response))
338 self.fail("%s was supposed to raise %s, not get '%s'" %
339 (which, expected_failure, res))
# Like shouldFail, but invokes `callable(*args, **kwargs)` itself (via
# maybeDeferred) and attaches the failure check as a callback.
# NOTE(review): inner line 342 is missing from this listing -- presumably
# the 'response_substring=None,' parameter line (it is asserted on at
# inner 345); lines 347, 350, 357, 359 (callback header, 'if substring:',
# and the else:) are missing too; kept byte-identical, not guessed.
341 def shouldFail2(self, expected_failure, which, substring,
343 callable, *args, **kwargs):
344 assert substring is None or isinstance(substring, str)
345 assert response_substring is None or isinstance(response_substring, str)
346 d = defer.maybeDeferred(callable, *args, **kwargs)
348 if isinstance(res, failure.Failure):
349 res.trap(expected_failure)
351 self.failUnless(substring in str(res),
352 "%s: substring '%s' not in '%s'"
353 % (which, substring, str(res)))
354 if response_substring:
355 self.failUnless(response_substring in res.value.response,
356 "%s: response substring '%s' not in '%s'"
358 response_substring, res.value.response))
360 self.fail("%s was supposed to raise %s, not get '%s'" %
361 (which, expected_failure, res))
# Assert that `res` is a web error.Error whose HTTP status is "404".
# NOTE(review): inner 369 ('else:') and 371-372 (the fail() call's
# argument continuation) are missing from this listing.
365 def should404(self, res, which):
366 if isinstance(res, failure.Failure):
367 res.trap(error.Error)
368 self.failUnlessEqual(res.value.status, "404")
370 self.fail("%s was supposed to Error(404), not get '%s'" %
# Assert that `res` is a web error.Error whose HTTP status is "302".
# NOTE(review): inner 377 ('else:') and 379+ (the fail() call's argument
# continuation) are missing from this listing.
373 def should302(self, res, which):
374 if isinstance(res, failure.Failure):
375 res.trap(error.Error)
376 self.failUnlessEqual(res.value.status, "302")
378 self.fail("%s was supposed to Error(302), not get '%s'" %
382 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
383 def test_create(self):
386 def test_welcome(self):
389 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
391 self.s.basedir = 'web/test_welcome'
392 fileutil.make_dirs("web/test_welcome")
393 fileutil.make_dirs("web/test_welcome/private")
395 d.addCallback(_check)
398 def test_provisioning(self):
399 d = self.GET("/provisioning/")
401 self.failUnless('Tahoe Provisioning Tool' in res)
402 fields = {'filled': True,
403 "num_users": int(50e3),
404 "files_per_user": 1000,
405 "space_per_user": int(1e9),
406 "sharing_ratio": 1.0,
407 "encoding_parameters": "3-of-10-5",
409 "ownership_mode": "A",
410 "download_rate": 100,
415 return self.POST("/provisioning/", **fields)
417 d.addCallback(_check)
419 self.failUnless('Tahoe Provisioning Tool' in res)
420 self.failUnless("Share space consumed: 167.01TB" in res)
422 fields = {'filled': True,
423 "num_users": int(50e6),
424 "files_per_user": 1000,
425 "space_per_user": int(5e9),
426 "sharing_ratio": 1.0,
427 "encoding_parameters": "25-of-100-50",
428 "num_servers": 30000,
429 "ownership_mode": "E",
430 "drive_failure_model": "U",
432 "download_rate": 1000,
437 return self.POST("/provisioning/", **fields)
438 d.addCallback(_check2)
440 self.failUnless("Share space consumed: huge!" in res)
441 fields = {'filled': True}
442 return self.POST("/provisioning/", **fields)
443 d.addCallback(_check3)
445 self.failUnless("Share space consumed:" in res)
446 d.addCallback(_check4)
449 def test_reliability_tool(self):
451 from allmydata import reliability
452 _hush_pyflakes = reliability
455 raise unittest.SkipTest("reliability tool requires NumPy")
457 d = self.GET("/reliability/")
459 self.failUnless('Tahoe Reliability Tool' in res)
460 fields = {'drive_lifetime': "8Y",
465 "check_period": "1M",
466 "report_period": "3M",
469 return self.POST("/reliability/", **fields)
471 d.addCallback(_check)
473 self.failUnless('Tahoe Reliability Tool' in res)
474 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
475 self.failUnless(re.search(r, res), res)
476 d.addCallback(_check2)
479 def test_status(self):
480 h = self.s.get_history()
481 dl_num = h.list_all_download_statuses()[0].get_counter()
482 ul_num = h.list_all_upload_statuses()[0].get_counter()
483 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
484 pub_num = h.list_all_publish_statuses()[0].get_counter()
485 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
486 d = self.GET("/status", followRedirect=True)
488 self.failUnless('Upload and Download Status' in res, res)
489 self.failUnless('"down-%d"' % dl_num in res, res)
490 self.failUnless('"up-%d"' % ul_num in res, res)
491 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
492 self.failUnless('"publish-%d"' % pub_num in res, res)
493 self.failUnless('"retrieve-%d"' % ret_num in res, res)
494 d.addCallback(_check)
495 d.addCallback(lambda res: self.GET("/status/?t=json"))
496 def _check_json(res):
497 data = simplejson.loads(res)
498 self.failUnless(isinstance(data, dict))
499 #active = data["active"]
500 # TODO: test more. We need a way to fake an active operation
502 d.addCallback(_check_json)
504 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
506 self.failUnless("File Download Status" in res, res)
507 d.addCallback(_check_dl)
508 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
510 self.failUnless("File Upload Status" in res, res)
511 d.addCallback(_check_ul)
512 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
513 def _check_mapupdate(res):
514 self.failUnless("Mutable File Servermap Update Status" in res, res)
515 d.addCallback(_check_mapupdate)
516 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
517 def _check_publish(res):
518 self.failUnless("Mutable File Publish Status" in res, res)
519 d.addCallback(_check_publish)
520 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
521 def _check_retrieve(res):
522 self.failUnless("Mutable File Retrieve Status" in res, res)
523 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """render_time/render_rate must format durations and rates exactly."""
    time_cases = [(None, ""), (2.5, "2.50s"), (0.25, "250ms"),
                  (0.0021, "2.1ms"), (0.000123, "123us")]
    rate_cases = [(None, ""), (2500000, "2.50MBps"),
                  (30100, "30.1kBps"), (123, "123Bps")]

    drrm = status.DownloadResultsRendererMixin()
    for raw, expected in time_cases:
        self.failUnlessEqual(drrm.render_time(None, raw), expected)
    for raw, expected in rate_cases:
        self.failUnlessEqual(drrm.render_rate(None, raw), expected)

    # the upload mixin must format identically to the download mixin
    urrm = status.UploadResultsRendererMixin()
    for raw, expected in time_cases:
        self.failUnlessEqual(urrm.render_time(None, raw), expected)
    for raw, expected in rate_cases:
        self.failUnlessEqual(urrm.render_rate(None, raw), expected)
550 def test_GET_FILEURL(self):
551 d = self.GET(self.public_url + "/foo/bar.txt")
552 d.addCallback(self.failUnlessIsBarDotTxt)
555 def test_GET_FILEURL_range(self):
556 headers = {"range": "bytes=1-10"}
557 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
558 return_response=True)
559 def _got((res, status, headers)):
560 self.failUnlessEqual(int(status), 206)
561 self.failUnless(headers.has_key("content-range"))
562 self.failUnlessEqual(headers["content-range"][0],
563 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
564 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
568 def test_GET_FILEURL_partial_range(self):
569 headers = {"range": "bytes=5-"}
570 length = len(self.BAR_CONTENTS)
571 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
572 return_response=True)
573 def _got((res, status, headers)):
574 self.failUnlessEqual(int(status), 206)
575 self.failUnless(headers.has_key("content-range"))
576 self.failUnlessEqual(headers["content-range"][0],
577 "bytes 5-%d/%d" % (length-1, length))
578 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
582 def test_HEAD_FILEURL_range(self):
583 headers = {"range": "bytes=1-10"}
584 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
585 return_response=True)
586 def _got((res, status, headers)):
587 self.failUnlessEqual(res, "")
588 self.failUnlessEqual(int(status), 206)
589 self.failUnless(headers.has_key("content-range"))
590 self.failUnlessEqual(headers["content-range"][0],
591 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
595 def test_HEAD_FILEURL_partial_range(self):
596 headers = {"range": "bytes=5-"}
597 length = len(self.BAR_CONTENTS)
598 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
599 return_response=True)
600 def _got((res, status, headers)):
601 self.failUnlessEqual(int(status), 206)
602 self.failUnless(headers.has_key("content-range"))
603 self.failUnlessEqual(headers["content-range"][0],
604 "bytes 5-%d/%d" % (length-1, length))
608 def test_GET_FILEURL_range_bad(self):
609 headers = {"range": "BOGUS=fizbop-quarnak"}
610 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
612 "Syntactically invalid http range header",
613 self.GET, self.public_url + "/foo/bar.txt",
617 def test_HEAD_FILEURL(self):
618 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
619 def _got((res, status, headers)):
620 self.failUnlessEqual(res, "")
621 self.failUnlessEqual(headers["content-length"][0],
622 str(len(self.BAR_CONTENTS)))
623 self.failUnlessEqual(headers["content-type"], ["text/plain"])
627 def test_GET_FILEURL_named(self):
628 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
629 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
630 d = self.GET(base + "/@@name=/blah.txt")
631 d.addCallback(self.failUnlessIsBarDotTxt)
632 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
633 d.addCallback(self.failUnlessIsBarDotTxt)
634 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
635 d.addCallback(self.failUnlessIsBarDotTxt)
636 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
637 d.addCallback(self.failUnlessIsBarDotTxt)
638 save_url = base + "?save=true&filename=blah.txt"
639 d.addCallback(lambda res: self.GET(save_url))
640 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
641 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
642 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
643 u_url = base + "?save=true&filename=" + u_fn_e
644 d.addCallback(lambda res: self.GET(u_url))
645 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
648 def test_PUT_FILEURL_named_bad(self):
649 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
650 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
652 "/file can only be used with GET or HEAD",
653 self.PUT, base + "/@@name=/blah.txt", "")
656 def test_GET_DIRURL_named_bad(self):
657 base = "/file/%s" % urllib.quote(self._foo_uri)
658 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
661 self.GET, base + "/@@name=/blah.txt")
664 def test_GET_slash_file_bad(self):
665 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
667 "/file must be followed by a file-cap and a name",
671 def test_GET_unhandled_URI_named(self):
672 contents, n, newuri = self.makefile(12)
673 verifier_cap = n.get_verify_cap().to_string()
674 base = "/file/%s" % urllib.quote(verifier_cap)
675 # client.create_node_from_uri() can't handle verify-caps
676 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
677 "400 Bad Request", "is not a file-cap",
681 def test_GET_unhandled_URI(self):
682 contents, n, newuri = self.makefile(12)
683 verifier_cap = n.get_verify_cap().to_string()
684 base = "/uri/%s" % urllib.quote(verifier_cap)
685 # client.create_node_from_uri() can't handle verify-caps
686 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
688 "GET unknown URI type: can only do t=info",
692 def test_GET_FILE_URI(self):
693 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
695 d.addCallback(self.failUnlessIsBarDotTxt)
698 def test_GET_FILE_URI_badchild(self):
699 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
700 errmsg = "Files have no children, certainly not named 'boguschild'"
701 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
702 "400 Bad Request", errmsg,
706 def test_PUT_FILE_URI_badchild(self):
707 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
708 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
709 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
710 "400 Bad Request", errmsg,
714 # TODO: version of this with a Unicode filename
715 def test_GET_FILEURL_save(self):
716 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
717 return_response=True)
718 def _got((res, statuscode, headers)):
719 content_disposition = headers["content-disposition"][0]
720 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
721 self.failUnlessIsBarDotTxt(res)
725 def test_GET_FILEURL_missing(self):
726 d = self.GET(self.public_url + "/foo/missing")
727 d.addBoth(self.should404, "test_GET_FILEURL_missing")
730 def test_PUT_overwrite_only_files(self):
731 # create a directory, put a file in that directory.
732 contents, n, filecap = self.makefile(8)
733 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
734 d.addCallback(lambda res:
735 self.PUT(self.public_url + "/foo/dir/file1.txt",
736 self.NEWFILE_CONTENTS))
737 # try to overwrite the file with replace=only-files
739 d.addCallback(lambda res:
740 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
742 d.addCallback(lambda res:
743 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
744 "There was already a child by that name, and you asked me "
746 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
750 def test_PUT_NEWFILEURL(self):
751 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
752 # TODO: we lose the response code, so we can't check this
753 #self.failUnlessEqual(responsecode, 201)
754 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
755 d.addCallback(lambda res:
756 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
757 self.NEWFILE_CONTENTS))
760 def test_PUT_NEWFILEURL_not_mutable(self):
761 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
762 self.NEWFILE_CONTENTS)
763 # TODO: we lose the response code, so we can't check this
764 #self.failUnlessEqual(responsecode, 201)
765 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
766 d.addCallback(lambda res:
767 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
768 self.NEWFILE_CONTENTS))
771 def test_PUT_NEWFILEURL_range_bad(self):
772 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
773 target = self.public_url + "/foo/new.txt"
774 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
775 "501 Not Implemented",
776 "Content-Range in PUT not yet supported",
777 # (and certainly not for immutable files)
778 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
780 d.addCallback(lambda res:
781 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
784 def test_PUT_NEWFILEURL_mutable(self):
785 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
786 self.NEWFILE_CONTENTS)
787 # TODO: we lose the response code, so we can't check this
788 #self.failUnlessEqual(responsecode, 201)
790 u = uri.from_string_mutable_filenode(res)
791 self.failUnless(u.is_mutable())
792 self.failIf(u.is_readonly())
794 d.addCallback(_check_uri)
795 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
796 d.addCallback(lambda res:
797 self.failUnlessMutableChildContentsAre(self._foo_node,
799 self.NEWFILE_CONTENTS))
802 def test_PUT_NEWFILEURL_mutable_toobig(self):
803 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
804 "413 Request Entity Too Large",
805 "SDMF is limited to one segment, and 10001 > 10000",
807 self.public_url + "/foo/new.txt?mutable=true",
808 "b" * (self.s.MUTABLE_SIZELIMIT+1))
811 def test_PUT_NEWFILEURL_replace(self):
812 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
813 # TODO: we lose the response code, so we can't check this
814 #self.failUnlessEqual(responsecode, 200)
815 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
816 d.addCallback(lambda res:
817 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
818 self.NEWFILE_CONTENTS))
821 def test_PUT_NEWFILEURL_bad_t(self):
822 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
823 "PUT to a file: bad t=bogus",
824 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
828 def test_PUT_NEWFILEURL_no_replace(self):
829 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
830 self.NEWFILE_CONTENTS)
831 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
833 "There was already a child by that name, and you asked me "
837 def test_PUT_NEWFILEURL_mkdirs(self):
838 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
840 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
841 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
842 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
843 d.addCallback(lambda res:
844 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
845 self.NEWFILE_CONTENTS))
848 def test_PUT_NEWFILEURL_blocked(self):
849 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
850 self.NEWFILE_CONTENTS)
851 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
853 "Unable to create directory 'blockingfile': a file was in the way")
856 def test_PUT_NEWFILEURL_emptyname(self):
857 # an empty pathname component (i.e. a double-slash) is disallowed
858 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
860 "The webapi does not allow empty pathname components",
861 self.PUT, self.public_url + "/foo//new.txt", "")
864 def test_DELETE_FILEURL(self):
865 d = self.DELETE(self.public_url + "/foo/bar.txt")
866 d.addCallback(lambda res:
867 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
870 def test_DELETE_FILEURL_missing(self):
871 d = self.DELETE(self.public_url + "/foo/missing")
872 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
875 def test_DELETE_FILEURL_missing2(self):
876 d = self.DELETE(self.public_url + "/missing/missing")
877 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
880 def failUnlessHasBarDotTxtMetadata(self, res):
881 data = simplejson.loads(res)
882 self.failUnless(isinstance(data, list))
883 self.failUnless(data[1].has_key("metadata"))
884 self.failUnless(data[1]["metadata"].has_key("ctime"))
885 self.failUnless(data[1]["metadata"].has_key("mtime"))
886 self.failUnlessEqual(data[1]["metadata"]["ctime"],
887 self._bar_txt_metadata["ctime"])
889 def test_GET_FILEURL_json(self):
890 # twisted.web.http.parse_qs ignores any query args without an '=', so
891 # I can't do "GET /path?json", I have to do "GET /path/t=json"
892 # instead. This may make it tricky to emulate the S3 interface
894 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
896 self.failUnlessIsBarJSON(data)
897 self.failUnlessHasBarDotTxtMetadata(data)
899 d.addCallback(_check1)
902 def test_GET_FILEURL_json_missing(self):
903 d = self.GET(self.public_url + "/foo/missing?json")
904 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
907 def test_GET_FILEURL_uri(self):
908 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
910 self.failUnlessEqual(res, self._bar_txt_uri)
911 d.addCallback(_check)
912 d.addCallback(lambda res:
913 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
915 # for now, for files, uris and readonly-uris are the same
916 self.failUnlessEqual(res, self._bar_txt_uri)
917 d.addCallback(_check2)
920 def test_GET_FILEURL_badtype(self):
921 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
924 self.public_url + "/foo/bar.txt?t=bogus")
927 def test_GET_FILEURL_uri_missing(self):
928 d = self.GET(self.public_url + "/foo/missing?t=uri")
929 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
932 def test_GET_DIRURL(self):
933 # the addSlash means we get a redirect here
934 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
936 d = self.GET(self.public_url + "/foo", followRedirect=True)
938 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
940 # the FILE reference points to a URI, but it should end in bar.txt
941 bar_url = ("%s/file/%s/@@named=/bar.txt" %
942 (ROOT, urllib.quote(self._bar_txt_uri)))
943 get_bar = "".join([r'<td>FILE</td>',
945 r'<a href="%s">bar.txt</a>' % bar_url,
947 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
949 self.failUnless(re.search(get_bar, res), res)
950 for line in res.split("\n"):
951 # find the line that contains the delete button for bar.txt
952 if ("form action" in line and
953 'value="delete"' in line and
954 'value="bar.txt"' in line):
955 # the form target should use a relative URL
956 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
957 self.failUnless(('action="%s"' % foo_url) in line, line)
958 # and the when_done= should too
959 #done_url = urllib.quote(???)
960 #self.failUnless(('name="when_done" value="%s"' % done_url)
964 self.fail("unable to find delete-bar.txt line", res)
966 # the DIR reference just points to a URI
967 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
968 get_sub = ((r'<td>DIR</td>')
969 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
970 self.failUnless(re.search(get_sub, res), res)
971 d.addCallback(_check)
973 # look at a readonly directory
974 d.addCallback(lambda res:
975 self.GET(self.public_url + "/reedownlee", followRedirect=True))
977 self.failUnless("(read-only)" in res, res)
978 self.failIf("Upload a file" in res, res)
979 d.addCallback(_check2)
981 # and at a directory that contains a readonly directory
982 d.addCallback(lambda res:
983 self.GET(self.public_url, followRedirect=True))
985 self.failUnless(re.search('<td>DIR-RO</td>'
986 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
987 d.addCallback(_check3)
989 # and an empty directory
990 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
992 self.failUnless("directory is empty" in res, res)
993 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
994 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
995 d.addCallback(_check4)
997 # and at a literal directory
998 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
999 d.addCallback(lambda res:
1000 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1002 self.failUnless('(immutable)' in res, res)
1003 self.failUnless(re.search('<td>FILE</td>'
1004 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1005 d.addCallback(_check5)
1008 def test_GET_DIRURL_badtype(self):
1009 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1013 self.public_url + "/foo?t=bogus")
1016 def test_GET_DIRURL_json(self):
1017 d = self.GET(self.public_url + "/foo?t=json")
1018 d.addCallback(self.failUnlessIsFooJSON)
1022 def test_POST_DIRURL_manifest_no_ophandle(self):
1023 d = self.shouldFail2(error.Error,
1024 "test_POST_DIRURL_manifest_no_ophandle",
1026 "slow operation requires ophandle=",
1027 self.POST, self.public_url, t="start-manifest")
1030 def test_POST_DIRURL_manifest(self):
1031 d = defer.succeed(None)
1032 def getman(ignored, output):
1033 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1034 followRedirect=True)
1035 d.addCallback(self.wait_for_operation, "125")
1036 d.addCallback(self.get_operation_results, "125", output)
1038 d.addCallback(getman, None)
1039 def _got_html(manifest):
1040 self.failUnless("Manifest of SI=" in manifest)
1041 self.failUnless("<td>sub</td>" in manifest)
1042 self.failUnless(self._sub_uri in manifest)
1043 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1044 d.addCallback(_got_html)
1046 # both t=status and unadorned GET should be identical
1047 d.addCallback(lambda res: self.GET("/operations/125"))
1048 d.addCallback(_got_html)
1050 d.addCallback(getman, "html")
1051 d.addCallback(_got_html)
1052 d.addCallback(getman, "text")
1053 def _got_text(manifest):
1054 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1055 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1056 d.addCallback(_got_text)
1057 d.addCallback(getman, "JSON")
1059 data = res["manifest"]
1061 for (path_list, cap) in data:
1062 got[tuple(path_list)] = cap
1063 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1064 self.failUnless((u"sub",u"baz.txt") in got)
1065 self.failUnless("finished" in res)
1066 self.failUnless("origin" in res)
1067 self.failUnless("storage-index" in res)
1068 self.failUnless("verifycaps" in res)
1069 self.failUnless("stats" in res)
1070 d.addCallback(_got_json)
1073 def test_POST_DIRURL_deepsize_no_ophandle(self):
1074 d = self.shouldFail2(error.Error,
1075 "test_POST_DIRURL_deepsize_no_ophandle",
1077 "slow operation requires ophandle=",
1078 self.POST, self.public_url, t="start-deep-size")
1081 def test_POST_DIRURL_deepsize(self):
1082 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1083 followRedirect=True)
1084 d.addCallback(self.wait_for_operation, "126")
1085 d.addCallback(self.get_operation_results, "126", "json")
1086 def _got_json(data):
1087 self.failUnlessEqual(data["finished"], True)
1089 self.failUnless(size > 1000)
1090 d.addCallback(_got_json)
1091 d.addCallback(self.get_operation_results, "126", "text")
1093 mo = re.search(r'^size: (\d+)$', res, re.M)
1094 self.failUnless(mo, res)
1095 size = int(mo.group(1))
1096 # with directories, the size varies.
1097 self.failUnless(size > 1000)
1098 d.addCallback(_got_text)
1101 def test_POST_DIRURL_deepstats_no_ophandle(self):
1102 d = self.shouldFail2(error.Error,
1103 "test_POST_DIRURL_deepstats_no_ophandle",
1105 "slow operation requires ophandle=",
1106 self.POST, self.public_url, t="start-deep-stats")
1109 def test_POST_DIRURL_deepstats(self):
1110 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1111 followRedirect=True)
1112 d.addCallback(self.wait_for_operation, "127")
1113 d.addCallback(self.get_operation_results, "127", "json")
1114 def _got_json(stats):
1115 expected = {"count-immutable-files": 3,
1116 "count-mutable-files": 0,
1117 "count-literal-files": 0,
1119 "count-directories": 3,
1120 "size-immutable-files": 57,
1121 "size-literal-files": 0,
1122 #"size-directories": 1912, # varies
1123 #"largest-directory": 1590,
1124 "largest-directory-children": 5,
1125 "largest-immutable-file": 19,
1127 for k,v in expected.iteritems():
1128 self.failUnlessEqual(stats[k], v,
1129 "stats[%s] was %s, not %s" %
1131 self.failUnlessEqual(stats["size-files-histogram"],
1133 d.addCallback(_got_json)
1136 def test_POST_DIRURL_stream_manifest(self):
1137 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1139 self.failUnless(res.endswith("\n"))
1140 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1141 self.failUnlessEqual(len(units), 7)
1142 self.failUnlessEqual(units[-1]["type"], "stats")
1144 self.failUnlessEqual(first["path"], [])
1145 self.failUnlessEqual(first["cap"], self._foo_uri)
1146 self.failUnlessEqual(first["type"], "directory")
1147 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1148 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1149 self.failIfEqual(baz["storage-index"], None)
1150 self.failIfEqual(baz["verifycap"], None)
1151 self.failIfEqual(baz["repaircap"], None)
1153 d.addCallback(_check)
1156 def test_GET_DIRURL_uri(self):
1157 d = self.GET(self.public_url + "/foo?t=uri")
1159 self.failUnlessEqual(res, self._foo_uri)
1160 d.addCallback(_check)
1163 def test_GET_DIRURL_readonly_uri(self):
1164 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1166 self.failUnlessEqual(res, self._foo_readonly_uri)
1167 d.addCallback(_check)
1170 def test_PUT_NEWDIRURL(self):
1171 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1172 d.addCallback(lambda res:
1173 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1174 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1175 d.addCallback(self.failUnlessNodeKeysAre, [])
1178 def test_POST_NEWDIRURL(self):
1179 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1180 d.addCallback(lambda res:
1181 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1182 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1183 d.addCallback(self.failUnlessNodeKeysAre, [])
1186 def test_POST_NEWDIRURL_emptyname(self):
1187 # an empty pathname component (i.e. a double-slash) is disallowed
1188 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1190 "The webapi does not allow empty pathname components, i.e. a double slash",
1191 self.POST, self.public_url + "//?t=mkdir")
1194 def test_POST_NEWDIRURL_initial_children(self):
1195 (newkids, caps) = self._create_initial_children()
1196 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1197 simplejson.dumps(newkids))
1199 n = self.s.create_node_from_uri(uri.strip())
1200 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1201 d2.addCallback(lambda ign:
1202 self.failUnlessROChildURIIs(n, u"child-imm",
1204 d2.addCallback(lambda ign:
1205 self.failUnlessRWChildURIIs(n, u"child-mutable",
1207 d2.addCallback(lambda ign:
1208 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1210 d2.addCallback(lambda ign:
1211 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1212 caps['unknown_rocap']))
1213 d2.addCallback(lambda ign:
1214 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1215 caps['unknown_rwcap']))
1216 d2.addCallback(lambda ign:
1217 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1218 caps['unknown_immcap']))
1219 d2.addCallback(lambda ign:
1220 self.failUnlessRWChildURIIs(n, u"dirchild",
1222 d2.addCallback(lambda ign:
1223 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1225 d2.addCallback(lambda ign:
1226 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1227 caps['emptydircap']))
1229 d.addCallback(_check)
1230 d.addCallback(lambda res:
1231 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1232 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1233 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1234 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1235 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1238 def test_POST_NEWDIRURL_immutable(self):
1239 (newkids, caps) = self._create_immutable_children()
1240 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1241 simplejson.dumps(newkids))
1243 n = self.s.create_node_from_uri(uri.strip())
1244 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1245 d2.addCallback(lambda ign:
1246 self.failUnlessROChildURIIs(n, u"child-imm",
1248 d2.addCallback(lambda ign:
1249 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1250 caps['unknown_immcap']))
1251 d2.addCallback(lambda ign:
1252 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1254 d2.addCallback(lambda ign:
1255 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1257 d2.addCallback(lambda ign:
1258 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1259 caps['emptydircap']))
1261 d.addCallback(_check)
1262 d.addCallback(lambda res:
1263 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1264 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1265 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1266 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1267 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1268 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1269 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1270 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1271 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1272 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1273 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1274 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1275 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1276 d.addErrback(self.explain_web_error)
1279 def test_POST_NEWDIRURL_immutable_bad(self):
1280 (newkids, caps) = self._create_initial_children()
1281 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1283 "needed to be immutable but was not",
1285 self.public_url + "/foo/newdir?t=mkdir-immutable",
1286 simplejson.dumps(newkids))
1289 def test_PUT_NEWDIRURL_exists(self):
1290 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1291 d.addCallback(lambda res:
1292 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1293 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1294 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1297 def test_PUT_NEWDIRURL_blocked(self):
1298 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1299 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1301 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1302 d.addCallback(lambda res:
1303 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1304 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1305 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1308 def test_PUT_NEWDIRURL_mkdir_p(self):
1309 d = defer.succeed(None)
1310 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1311 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1312 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1313 def mkdir_p(mkpnode):
1314 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1316 def made_subsub(ssuri):
1317 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1318 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1320 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1322 d.addCallback(made_subsub)
1324 d.addCallback(mkdir_p)
1327 def test_PUT_NEWDIRURL_mkdirs(self):
1328 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1329 d.addCallback(lambda res:
1330 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1331 d.addCallback(lambda res:
1332 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1333 d.addCallback(lambda res:
1334 self._foo_node.get_child_at_path(u"subdir/newdir"))
1335 d.addCallback(self.failUnlessNodeKeysAre, [])
1338 def test_DELETE_DIRURL(self):
1339 d = self.DELETE(self.public_url + "/foo")
1340 d.addCallback(lambda res:
1341 self.failIfNodeHasChild(self.public_root, u"foo"))
1344 def test_DELETE_DIRURL_missing(self):
1345 d = self.DELETE(self.public_url + "/foo/missing")
1346 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1347 d.addCallback(lambda res:
1348 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1351 def test_DELETE_DIRURL_missing2(self):
1352 d = self.DELETE(self.public_url + "/missing")
1353 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
1356 def dump_root(self):
1358 w = webish.DirnodeWalkerMixin()
1359 def visitor(childpath, childnode, metadata):
1361 d = w.walk(self.public_root, visitor)
1364 def failUnlessNodeKeysAre(self, node, expected_keys):
1365 for k in expected_keys:
1366 assert isinstance(k, unicode)
1368 def _check(children):
1369 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1370 d.addCallback(_check)
1372 def failUnlessNodeHasChild(self, node, name):
1373 assert isinstance(name, unicode)
1375 def _check(children):
1376 self.failUnless(name in children)
1377 d.addCallback(_check)
1379 def failIfNodeHasChild(self, node, name):
1380 assert isinstance(name, unicode)
1382 def _check(children):
1383 self.failIf(name in children)
1384 d.addCallback(_check)
1387 def failUnlessChildContentsAre(self, node, name, expected_contents):
1388 assert isinstance(name, unicode)
1389 d = node.get_child_at_path(name)
1390 d.addCallback(lambda node: download_to_data(node))
1391 def _check(contents):
1392 self.failUnlessEqual(contents, expected_contents)
1393 d.addCallback(_check)
1396 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1397 assert isinstance(name, unicode)
1398 d = node.get_child_at_path(name)
1399 d.addCallback(lambda node: node.download_best_version())
1400 def _check(contents):
1401 self.failUnlessEqual(contents, expected_contents)
1402 d.addCallback(_check)
1405 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1406 assert isinstance(name, unicode)
1407 d = node.get_child_at_path(name)
1409 self.failUnless(child.is_unknown() or not child.is_readonly())
1410 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1411 self.failUnlessEqual(child.get_write_uri(), expected_uri.strip())
1412 expected_ro_uri = self._make_readonly(expected_uri)
1414 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1415 d.addCallback(_check)
1418 def failUnlessROChildURIIs(self, node, name, expected_uri):
1419 assert isinstance(name, unicode)
1420 d = node.get_child_at_path(name)
1422 self.failUnless(child.is_unknown() or child.is_readonly())
1423 self.failUnlessEqual(child.get_write_uri(), None)
1424 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1425 self.failUnlessEqual(child.get_readonly_uri(), expected_uri.strip())
1426 d.addCallback(_check)
1429 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1430 assert isinstance(name, unicode)
1431 d = node.get_child_at_path(name)
1433 self.failUnless(child.is_unknown() or not child.is_readonly())
1434 self.failUnlessEqual(child.get_uri(), got_uri.strip())
1435 self.failUnlessEqual(child.get_write_uri(), got_uri.strip())
1436 expected_ro_uri = self._make_readonly(got_uri)
1438 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1439 d.addCallback(_check)
1442 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1443 assert isinstance(name, unicode)
1444 d = node.get_child_at_path(name)
1446 self.failUnless(child.is_unknown() or child.is_readonly())
1447 self.failUnlessEqual(child.get_write_uri(), None)
1448 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1449 self.failUnlessEqual(got_uri.strip(), child.get_readonly_uri())
1450 d.addCallback(_check)
1453 def failUnlessCHKURIHasContents(self, got_uri, contents):
1454 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
1456 def test_POST_upload(self):
1457 d = self.POST(self.public_url + "/foo", t="upload",
1458 file=("new.txt", self.NEWFILE_CONTENTS))
1460 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1461 d.addCallback(lambda res:
1462 self.failUnlessChildContentsAre(fn, u"new.txt",
1463 self.NEWFILE_CONTENTS))
1466 def test_POST_upload_unicode(self):
1467 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1468 d = self.POST(self.public_url + "/foo", t="upload",
1469 file=(filename, self.NEWFILE_CONTENTS))
1471 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1472 d.addCallback(lambda res:
1473 self.failUnlessChildContentsAre(fn, filename,
1474 self.NEWFILE_CONTENTS))
1475 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1476 d.addCallback(lambda res: self.GET(target_url))
1477 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1478 self.NEWFILE_CONTENTS,
1482 def test_POST_upload_unicode_named(self):
1483 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1484 d = self.POST(self.public_url + "/foo", t="upload",
1486 file=("overridden", self.NEWFILE_CONTENTS))
1488 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1489 d.addCallback(lambda res:
1490 self.failUnlessChildContentsAre(fn, filename,
1491 self.NEWFILE_CONTENTS))
1492 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1493 d.addCallback(lambda res: self.GET(target_url))
1494 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1495 self.NEWFILE_CONTENTS,
1499 def test_POST_upload_no_link(self):
1500 d = self.POST("/uri", t="upload",
1501 file=("new.txt", self.NEWFILE_CONTENTS))
1502 def _check_upload_results(page):
1503 # this should be a page which describes the results of the upload
1504 # that just finished.
1505 self.failUnless("Upload Results:" in page)
1506 self.failUnless("URI:" in page)
1507 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1508 mo = uri_re.search(page)
1509 self.failUnless(mo, page)
1510 new_uri = mo.group(1)
1512 d.addCallback(_check_upload_results)
1513 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1516 def test_POST_upload_no_link_whendone(self):
1517 d = self.POST("/uri", t="upload", when_done="/",
1518 file=("new.txt", self.NEWFILE_CONTENTS))
1519 d.addBoth(self.shouldRedirect, "/")
1522 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1523 d = defer.maybeDeferred(callable, *args, **kwargs)
1525 if isinstance(res, failure.Failure):
1526 res.trap(error.PageRedirect)
1527 statuscode = res.value.status
1528 target = res.value.location
1529 return checker(statuscode, target)
1530 self.fail("%s: callable was supposed to redirect, not return '%s'"
1535 def test_POST_upload_no_link_whendone_results(self):
1536 def check(statuscode, target):
1537 self.failUnlessEqual(statuscode, str(http.FOUND))
1538 self.failUnless(target.startswith(self.webish_url), target)
1539 return client.getPage(target, method="GET")
1540 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1542 self.POST, "/uri", t="upload",
1543 when_done="/uri/%(uri)s",
1544 file=("new.txt", self.NEWFILE_CONTENTS))
1545 d.addCallback(lambda res:
1546 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
1549 def test_POST_upload_no_link_mutable(self):
1550 d = self.POST("/uri", t="upload", mutable="true",
1551 file=("new.txt", self.NEWFILE_CONTENTS))
1552 def _check(filecap):
1553 filecap = filecap.strip()
1554 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1555 self.filecap = filecap
1556 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1557 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1558 n = self.s.create_node_from_uri(filecap)
1559 return n.download_best_version()
1560 d.addCallback(_check)
1562 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1563 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1564 d.addCallback(_check2)
1566 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1567 return self.GET("/file/%s" % urllib.quote(self.filecap))
1568 d.addCallback(_check3)
1570 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1571 d.addCallback(_check4)
1574 def test_POST_upload_no_link_mutable_toobig(self):
1575 d = self.shouldFail2(error.Error,
1576 "test_POST_upload_no_link_mutable_toobig",
1577 "413 Request Entity Too Large",
1578 "SDMF is limited to one segment, and 10001 > 10000",
1580 "/uri", t="upload", mutable="true",
1582 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1585 def test_POST_upload_mutable(self):
1586 # this creates a mutable file
1587 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1588 file=("new.txt", self.NEWFILE_CONTENTS))
1590 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1591 d.addCallback(lambda res:
1592 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1593 self.NEWFILE_CONTENTS))
1594 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1596 self.failUnless(IMutableFileNode.providedBy(newnode))
1597 self.failUnless(newnode.is_mutable())
1598 self.failIf(newnode.is_readonly())
1599 self._mutable_node = newnode
1600 self._mutable_uri = newnode.get_uri()
1603 # now upload it again and make sure that the URI doesn't change
1604 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1605 d.addCallback(lambda res:
1606 self.POST(self.public_url + "/foo", t="upload",
1608 file=("new.txt", NEWER_CONTENTS)))
1609 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1610 d.addCallback(lambda res:
1611 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1613 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1615 self.failUnless(IMutableFileNode.providedBy(newnode))
1616 self.failUnless(newnode.is_mutable())
1617 self.failIf(newnode.is_readonly())
1618 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1619 d.addCallback(_got2)
1621 # upload a second time, using PUT instead of POST
1622 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1623 d.addCallback(lambda res:
1624 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1625 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1626 d.addCallback(lambda res:
1627 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1630 # finally list the directory, since mutable files are displayed
1631 # slightly differently
1633 d.addCallback(lambda res:
1634 self.GET(self.public_url + "/foo/",
1635 followRedirect=True))
1636 def _check_page(res):
1637 # TODO: assert more about the contents
1638 self.failUnless("SSK" in res)
1640 d.addCallback(_check_page)
1642 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1644 self.failUnless(IMutableFileNode.providedBy(newnode))
1645 self.failUnless(newnode.is_mutable())
1646 self.failIf(newnode.is_readonly())
1647 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1648 d.addCallback(_got3)
1650 # look at the JSON form of the enclosing directory
1651 d.addCallback(lambda res:
1652 self.GET(self.public_url + "/foo/?t=json",
1653 followRedirect=True))
1654 def _check_page_json(res):
1655 parsed = simplejson.loads(res)
1656 self.failUnlessEqual(parsed[0], "dirnode")
1657 children = dict( [(unicode(name),value)
1659 in parsed[1]["children"].iteritems()] )
1660 self.failUnless("new.txt" in children)
1661 new_json = children["new.txt"]
1662 self.failUnlessEqual(new_json[0], "filenode")
1663 self.failUnless(new_json[1]["mutable"])
1664 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1665 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1666 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1667 d.addCallback(_check_page_json)
1669 # and the JSON form of the file
1670 d.addCallback(lambda res:
1671 self.GET(self.public_url + "/foo/new.txt?t=json"))
1672 def _check_file_json(res):
1673 parsed = simplejson.loads(res)
1674 self.failUnlessEqual(parsed[0], "filenode")
1675 self.failUnless(parsed[1]["mutable"])
1676 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1677 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1678 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1679 d.addCallback(_check_file_json)
1681 # and look at t=uri and t=readonly-uri
1682 d.addCallback(lambda res:
1683 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1684 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1685 d.addCallback(lambda res:
1686 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1687 def _check_ro_uri(res):
1688 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1689 self.failUnlessEqual(res, ro_uri)
1690 d.addCallback(_check_ro_uri)
1692 # make sure we can get to it from /uri/URI
1693 d.addCallback(lambda res:
1694 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1695 d.addCallback(lambda res:
1696 self.failUnlessEqual(res, NEW2_CONTENTS))
1698 # and that HEAD computes the size correctly
1699 d.addCallback(lambda res:
1700 self.HEAD(self.public_url + "/foo/new.txt",
1701 return_response=True))
1702 def _got_headers((res, status, headers)):
1703 self.failUnlessEqual(res, "")
1704 self.failUnlessEqual(headers["content-length"][0],
1705 str(len(NEW2_CONTENTS)))
1706 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1707 d.addCallback(_got_headers)
1709 # make sure that size errors are displayed correctly for overwrite
1710 d.addCallback(lambda res:
1711 self.shouldFail2(error.Error,
1712 "test_POST_upload_mutable-toobig",
1713 "413 Request Entity Too Large",
1714 "SDMF is limited to one segment, and 10001 > 10000",
1716 self.public_url + "/foo", t="upload",
1719 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1722 d.addErrback(self.dump_error)
1725 def test_POST_upload_mutable_toobig(self):
1726 d = self.shouldFail2(error.Error,
1727 "test_POST_upload_mutable_toobig",
1728 "413 Request Entity Too Large",
1729 "SDMF is limited to one segment, and 10001 > 10000",
1731 self.public_url + "/foo",
1732 t="upload", mutable="true",
1734 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1737 def dump_error(self, f):
1738 # if the web server returns an error code (like 400 Bad Request),
1739 # web.client.getPage puts the HTTP response body into the .response
1740 # attribute of the exception object that it gives back. It does not
1741 # appear in the Failure's repr(), so the ERROR that trial displays
1742 # will be rather terse and unhelpful. addErrback this method to the
1743 # end of your chain to get more information out of these errors.
1744 if f.check(error.Error):
1745 print "web.error.Error:"
1747 print f.value.response
1750 def test_POST_upload_replace(self):
1751 d = self.POST(self.public_url + "/foo", t="upload",
1752 file=("bar.txt", self.NEWFILE_CONTENTS))
1754 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1755 d.addCallback(lambda res:
1756 self.failUnlessChildContentsAre(fn, u"bar.txt",
1757 self.NEWFILE_CONTENTS))
1760 def test_POST_upload_no_replace_ok(self):
1761 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1762 file=("new.txt", self.NEWFILE_CONTENTS))
1763 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1764 d.addCallback(lambda res: self.failUnlessEqual(res,
1765 self.NEWFILE_CONTENTS))
1768 def test_POST_upload_no_replace_queryarg(self):
1769 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1770 file=("bar.txt", self.NEWFILE_CONTENTS))
1771 d.addBoth(self.shouldFail, error.Error,
1772 "POST_upload_no_replace_queryarg",
1774 "There was already a child by that name, and you asked me "
1775 "to not replace it")
1776 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1777 d.addCallback(self.failUnlessIsBarDotTxt)
1780 def test_POST_upload_no_replace_field(self):
1781 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1782 file=("bar.txt", self.NEWFILE_CONTENTS))
1783 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1785 "There was already a child by that name, and you asked me "
1786 "to not replace it")
1787 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1788 d.addCallback(self.failUnlessIsBarDotTxt)
1791 def test_POST_upload_whendone(self):
1792 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1793 file=("new.txt", self.NEWFILE_CONTENTS))
1794 d.addBoth(self.shouldRedirect, "/THERE")
1796 d.addCallback(lambda res:
1797 self.failUnlessChildContentsAre(fn, u"new.txt",
1798 self.NEWFILE_CONTENTS))
1801 def test_POST_upload_named(self):
1803 d = self.POST(self.public_url + "/foo", t="upload",
1804 name="new.txt", file=self.NEWFILE_CONTENTS)
1805 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1806 d.addCallback(lambda res:
1807 self.failUnlessChildContentsAre(fn, u"new.txt",
1808 self.NEWFILE_CONTENTS))
1811 def test_POST_upload_named_badfilename(self):
1812 d = self.POST(self.public_url + "/foo", t="upload",
1813 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1814 d.addBoth(self.shouldFail, error.Error,
1815 "test_POST_upload_named_badfilename",
1817 "name= may not contain a slash",
1819 # make sure that nothing was added
1820 d.addCallback(lambda res:
1821 self.failUnlessNodeKeysAre(self._foo_node,
1822 [u"bar.txt", u"blockingfile",
1823 u"empty", u"n\u00fc.txt",
1827 def test_POST_FILEURL_check(self):
1828 bar_url = self.public_url + "/foo/bar.txt"
1829 d = self.POST(bar_url, t="check")
1831 self.failUnless("Healthy :" in res)
1832 d.addCallback(_check)
1833 redir_url = "http://allmydata.org/TARGET"
1834 def _check2(statuscode, target):
1835 self.failUnlessEqual(statuscode, str(http.FOUND))
1836 self.failUnlessEqual(target, redir_url)
1837 d.addCallback(lambda res:
1838 self.shouldRedirect2("test_POST_FILEURL_check",
1842 when_done=redir_url))
1843 d.addCallback(lambda res:
1844 self.POST(bar_url, t="check", return_to=redir_url))
1846 self.failUnless("Healthy :" in res)
1847 self.failUnless("Return to file" in res)
1848 self.failUnless(redir_url in res)
1849 d.addCallback(_check3)
1851 d.addCallback(lambda res:
1852 self.POST(bar_url, t="check", output="JSON"))
1853 def _check_json(res):
1854 data = simplejson.loads(res)
1855 self.failUnless("storage-index" in data)
1856 self.failUnless(data["results"]["healthy"])
1857 d.addCallback(_check_json)
1861 def test_POST_FILEURL_check_and_repair(self):
1862 bar_url = self.public_url + "/foo/bar.txt"
1863 d = self.POST(bar_url, t="check", repair="true")
1865 self.failUnless("Healthy :" in res)
1866 d.addCallback(_check)
1867 redir_url = "http://allmydata.org/TARGET"
1868 def _check2(statuscode, target):
1869 self.failUnlessEqual(statuscode, str(http.FOUND))
1870 self.failUnlessEqual(target, redir_url)
1871 d.addCallback(lambda res:
1872 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1875 t="check", repair="true",
1876 when_done=redir_url))
1877 d.addCallback(lambda res:
1878 self.POST(bar_url, t="check", return_to=redir_url))
1880 self.failUnless("Healthy :" in res)
1881 self.failUnless("Return to file" in res)
1882 self.failUnless(redir_url in res)
1883 d.addCallback(_check3)
1886 def test_POST_DIRURL_check(self):
1887 foo_url = self.public_url + "/foo/"
1888 d = self.POST(foo_url, t="check")
1890 self.failUnless("Healthy :" in res, res)
1891 d.addCallback(_check)
1892 redir_url = "http://allmydata.org/TARGET"
1893 def _check2(statuscode, target):
1894 self.failUnlessEqual(statuscode, str(http.FOUND))
1895 self.failUnlessEqual(target, redir_url)
1896 d.addCallback(lambda res:
1897 self.shouldRedirect2("test_POST_DIRURL_check",
1901 when_done=redir_url))
1902 d.addCallback(lambda res:
1903 self.POST(foo_url, t="check", return_to=redir_url))
1905 self.failUnless("Healthy :" in res, res)
1906 self.failUnless("Return to file/directory" in res)
1907 self.failUnless(redir_url in res)
1908 d.addCallback(_check3)
1910 d.addCallback(lambda res:
1911 self.POST(foo_url, t="check", output="JSON"))
1912 def _check_json(res):
1913 data = simplejson.loads(res)
1914 self.failUnless("storage-index" in data)
1915 self.failUnless(data["results"]["healthy"])
1916 d.addCallback(_check_json)
1920 def test_POST_DIRURL_check_and_repair(self):
1921 foo_url = self.public_url + "/foo/"
1922 d = self.POST(foo_url, t="check", repair="true")
1924 self.failUnless("Healthy :" in res, res)
1925 d.addCallback(_check)
1926 redir_url = "http://allmydata.org/TARGET"
1927 def _check2(statuscode, target):
1928 self.failUnlessEqual(statuscode, str(http.FOUND))
1929 self.failUnlessEqual(target, redir_url)
1930 d.addCallback(lambda res:
1931 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1934 t="check", repair="true",
1935 when_done=redir_url))
1936 d.addCallback(lambda res:
1937 self.POST(foo_url, t="check", return_to=redir_url))
1939 self.failUnless("Healthy :" in res)
1940 self.failUnless("Return to file/directory" in res)
1941 self.failUnless(redir_url in res)
1942 d.addCallback(_check3)
1945 def wait_for_operation(self, ignored, ophandle):
1946 url = "/operations/" + ophandle
1947 url += "?t=status&output=JSON"
1950 data = simplejson.loads(res)
1951 if not data["finished"]:
1952 d = self.stall(delay=1.0)
1953 d.addCallback(self.wait_for_operation, ophandle)
1959 def get_operation_results(self, ignored, ophandle, output=None):
1960 url = "/operations/" + ophandle
1963 url += "&output=" + output
1966 if output and output.lower() == "json":
1967 return simplejson.loads(res)
1972 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1973 d = self.shouldFail2(error.Error,
1974 "test_POST_DIRURL_deepcheck_no_ophandle",
1976 "slow operation requires ophandle=",
1977 self.POST, self.public_url, t="start-deep-check")
1980 def test_POST_DIRURL_deepcheck(self):
1981 def _check_redirect(statuscode, target):
1982 self.failUnlessEqual(statuscode, str(http.FOUND))
1983 self.failUnless(target.endswith("/operations/123"))
1984 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1985 self.POST, self.public_url,
1986 t="start-deep-check", ophandle="123")
1987 d.addCallback(self.wait_for_operation, "123")
1988 def _check_json(data):
1989 self.failUnlessEqual(data["finished"], True)
1990 self.failUnlessEqual(data["count-objects-checked"], 8)
1991 self.failUnlessEqual(data["count-objects-healthy"], 8)
1992 d.addCallback(_check_json)
1993 d.addCallback(self.get_operation_results, "123", "html")
1994 def _check_html(res):
1995 self.failUnless("Objects Checked: <span>8</span>" in res)
1996 self.failUnless("Objects Healthy: <span>8</span>" in res)
1997 d.addCallback(_check_html)
1999 d.addCallback(lambda res:
2000 self.GET("/operations/123/"))
2001 d.addCallback(_check_html) # should be the same as without the slash
2003 d.addCallback(lambda res:
2004 self.shouldFail2(error.Error, "one", "404 Not Found",
2005 "No detailed results for SI bogus",
2006 self.GET, "/operations/123/bogus"))
2008 foo_si = self._foo_node.get_storage_index()
2009 foo_si_s = base32.b2a(foo_si)
2010 d.addCallback(lambda res:
2011 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2012 def _check_foo_json(res):
2013 data = simplejson.loads(res)
2014 self.failUnlessEqual(data["storage-index"], foo_si_s)
2015 self.failUnless(data["results"]["healthy"])
2016 d.addCallback(_check_foo_json)
2019 def test_POST_DIRURL_deepcheck_and_repair(self):
2020 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2021 ophandle="124", output="json", followRedirect=True)
2022 d.addCallback(self.wait_for_operation, "124")
2023 def _check_json(data):
2024 self.failUnlessEqual(data["finished"], True)
2025 self.failUnlessEqual(data["count-objects-checked"], 8)
2026 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
2027 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
2028 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
2029 self.failUnlessEqual(data["count-repairs-attempted"], 0)
2030 self.failUnlessEqual(data["count-repairs-successful"], 0)
2031 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
2032 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
2033 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
2034 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
2035 d.addCallback(_check_json)
2036 d.addCallback(self.get_operation_results, "124", "html")
2037 def _check_html(res):
2038 self.failUnless("Objects Checked: <span>8</span>" in res)
2040 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2041 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2042 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2044 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2045 self.failUnless("Repairs Successful: <span>0</span>" in res)
2046 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2048 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2049 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2050 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2051 d.addCallback(_check_html)
2054 def test_POST_FILEURL_bad_t(self):
2055 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2056 "POST to file: bad t=bogus",
2057 self.POST, self.public_url + "/foo/bar.txt",
2061 def test_POST_mkdir(self): # return value?
2062 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2063 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2064 d.addCallback(self.failUnlessNodeKeysAre, [])
2067 def test_POST_mkdir_initial_children(self):
2068 (newkids, caps) = self._create_initial_children()
2069 d = self.POST2(self.public_url +
2070 "/foo?t=mkdir-with-children&name=newdir",
2071 simplejson.dumps(newkids))
2072 d.addCallback(lambda res:
2073 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2074 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2075 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2076 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2077 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2080 def test_POST_mkdir_immutable(self):
2081 (newkids, caps) = self._create_immutable_children()
2082 d = self.POST2(self.public_url +
2083 "/foo?t=mkdir-immutable&name=newdir",
2084 simplejson.dumps(newkids))
2085 d.addCallback(lambda res:
2086 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2087 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2088 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2089 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2090 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2091 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2092 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2093 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2094 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2095 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2096 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2097 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2098 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
2101 def test_POST_mkdir_immutable_bad(self):
2102 (newkids, caps) = self._create_initial_children()
2103 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2105 "needed to be immutable but was not",
2108 "/foo?t=mkdir-immutable&name=newdir",
2109 simplejson.dumps(newkids))
2112 def test_POST_mkdir_2(self):
2113 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2114 d.addCallback(lambda res:
2115 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2116 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2117 d.addCallback(self.failUnlessNodeKeysAre, [])
2120 def test_POST_mkdirs_2(self):
2121 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2122 d.addCallback(lambda res:
2123 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2124 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2125 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2126 d.addCallback(self.failUnlessNodeKeysAre, [])
2129 def test_POST_mkdir_no_parentdir_noredirect(self):
2130 d = self.POST("/uri?t=mkdir")
2131 def _after_mkdir(res):
2132 uri.DirectoryURI.init_from_string(res)
2133 d.addCallback(_after_mkdir)
2136 def test_POST_mkdir_no_parentdir_noredirect2(self):
2137 # make sure form-based arguments (as on the welcome page) still work
2138 d = self.POST("/uri", t="mkdir")
2139 def _after_mkdir(res):
2140 uri.DirectoryURI.init_from_string(res)
2141 d.addCallback(_after_mkdir)
2142 d.addErrback(self.explain_web_error)
2145 def test_POST_mkdir_no_parentdir_redirect(self):
2146 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2147 d.addBoth(self.shouldRedirect, None, statuscode='303')
2148 def _check_target(target):
2149 target = urllib.unquote(target)
2150 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2151 d.addCallback(_check_target)
2154 def test_POST_mkdir_no_parentdir_redirect2(self):
2155 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2156 d.addBoth(self.shouldRedirect, None, statuscode='303')
2157 def _check_target(target):
2158 target = urllib.unquote(target)
2159 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2160 d.addCallback(_check_target)
2161 d.addErrback(self.explain_web_error)
    def _make_readonly(self, u):
        # Return the read-only form of the cap string *u*.
        # NOTE(review): the extracted source appears to be missing lines
        # between these two statements — upstream probably guards against
        # get_readonly() returning None. Confirm against the original file.
        ro_uri = uri.from_string(u).get_readonly()
        return ro_uri.to_string()
2170 def _create_initial_children(self):
2171 contents, n, filecap1 = self.makefile(12)
2172 md1 = {"metakey1": "metavalue1"}
2173 filecap2 = make_mutable_file_uri()
2174 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2175 filecap3 = node3.get_readonly_uri()
2176 unknown_rwcap = "lafs://from_the_future"
2177 unknown_rocap = "ro.lafs://readonly_from_the_future"
2178 unknown_immcap = "imm.lafs://immutable_from_the_future"
2179 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2180 dircap = DirectoryNode(node4, None, None).get_uri()
2181 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2182 emptydircap = "URI:DIR2-LIT:"
2183 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2184 "ro_uri": self._make_readonly(filecap1),
2185 "metadata": md1, }],
2186 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2187 "ro_uri": self._make_readonly(filecap2)}],
2188 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2189 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2190 "ro_uri": unknown_rocap}],
2191 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2192 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2193 u"dirchild": ["dirnode", {"rw_uri": dircap,
2194 "ro_uri": self._make_readonly(dircap)}],
2195 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2196 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2198 return newkids, {'filecap1': filecap1,
2199 'filecap2': filecap2,
2200 'filecap3': filecap3,
2201 'unknown_rwcap': unknown_rwcap,
2202 'unknown_rocap': unknown_rocap,
2203 'unknown_immcap': unknown_immcap,
2205 'litdircap': litdircap,
2206 'emptydircap': emptydircap}
2208 def _create_immutable_children(self):
2209 contents, n, filecap1 = self.makefile(12)
2210 md1 = {"metakey1": "metavalue1"}
2211 tnode = create_chk_filenode("immutable directory contents\n"*10)
2212 dnode = DirectoryNode(tnode, None, None)
2213 assert not dnode.is_mutable()
2214 unknown_immcap = "imm.lafs://immutable_from_the_future"
2215 immdircap = dnode.get_uri()
2216 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2217 emptydircap = "URI:DIR2-LIT:"
2218 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2219 "metadata": md1, }],
2220 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2221 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2222 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2223 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2225 return newkids, {'filecap1': filecap1,
2226 'unknown_immcap': unknown_immcap,
2227 'immdircap': immdircap,
2228 'litdircap': litdircap,
2229 'emptydircap': emptydircap}
2231 def test_POST_mkdir_no_parentdir_initial_children(self):
2232 (newkids, caps) = self._create_initial_children()
2233 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2234 def _after_mkdir(res):
2235 self.failUnless(res.startswith("URI:DIR"), res)
2236 n = self.s.create_node_from_uri(res)
2237 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2238 d2.addCallback(lambda ign:
2239 self.failUnlessROChildURIIs(n, u"child-imm",
2241 d2.addCallback(lambda ign:
2242 self.failUnlessRWChildURIIs(n, u"child-mutable",
2244 d2.addCallback(lambda ign:
2245 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2247 d2.addCallback(lambda ign:
2248 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2249 caps['unknown_rwcap']))
2250 d2.addCallback(lambda ign:
2251 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2252 caps['unknown_rocap']))
2253 d2.addCallback(lambda ign:
2254 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2255 caps['unknown_immcap']))
2256 d2.addCallback(lambda ign:
2257 self.failUnlessRWChildURIIs(n, u"dirchild",
2260 d.addCallback(_after_mkdir)
2263 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2264 # the regular /uri?t=mkdir operation is specified to ignore its body.
2265 # Only t=mkdir-with-children pays attention to it.
2266 (newkids, caps) = self._create_initial_children()
2267 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2269 "t=mkdir does not accept children=, "
2270 "try t=mkdir-with-children instead",
2271 self.POST2, "/uri?t=mkdir", # without children
2272 simplejson.dumps(newkids))
2275 def test_POST_noparent_bad(self):
2276 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2277 "/uri accepts only PUT, PUT?t=mkdir, "
2278 "POST?t=upload, and POST?t=mkdir",
2279 self.POST, "/uri?t=bogus")
2282 def test_POST_mkdir_no_parentdir_immutable(self):
2283 (newkids, caps) = self._create_immutable_children()
2284 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2285 def _after_mkdir(res):
2286 self.failUnless(res.startswith("URI:DIR"), res)
2287 n = self.s.create_node_from_uri(res)
2288 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2289 d2.addCallback(lambda ign:
2290 self.failUnlessROChildURIIs(n, u"child-imm",
2292 d2.addCallback(lambda ign:
2293 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2294 caps['unknown_immcap']))
2295 d2.addCallback(lambda ign:
2296 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2298 d2.addCallback(lambda ign:
2299 self.failUnlessROChildURIIs(n, u"dirchild-lit",
2301 d2.addCallback(lambda ign:
2302 self.failUnlessROChildURIIs(n, u"dirchild-empty",
2303 caps['emptydircap']))
2305 d.addCallback(_after_mkdir)
2308 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2309 (newkids, caps) = self._create_initial_children()
2310 d = self.shouldFail2(error.Error,
2311 "test_POST_mkdir_no_parentdir_immutable_bad",
2313 "needed to be immutable but was not",
2315 "/uri?t=mkdir-immutable",
2316 simplejson.dumps(newkids))
2319 def test_welcome_page_mkdir_button(self):
2320 # Fetch the welcome page.
2322 def _after_get_welcome_page(res):
2323 MKDIR_BUTTON_RE = re.compile(
2324 '<form action="([^"]*)" method="post".*?'
2325 '<input type="hidden" name="t" value="([^"]*)" />'
2326 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2327 '<input type="submit" value="Create a directory" />',
2329 mo = MKDIR_BUTTON_RE.search(res)
2330 formaction = mo.group(1)
2332 formaname = mo.group(3)
2333 formavalue = mo.group(4)
2334 return (formaction, formt, formaname, formavalue)
2335 d.addCallback(_after_get_welcome_page)
2336 def _after_parse_form(res):
2337 (formaction, formt, formaname, formavalue) = res
2338 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2339 d.addCallback(_after_parse_form)
2340 d.addBoth(self.shouldRedirect, None, statuscode='303')
2343 def test_POST_mkdir_replace(self): # return value?
2344 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2345 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2346 d.addCallback(self.failUnlessNodeKeysAre, [])
2349 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2350 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2351 d.addBoth(self.shouldFail, error.Error,
2352 "POST_mkdir_no_replace_queryarg",
2354 "There was already a child by that name, and you asked me "
2355 "to not replace it")
2356 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2357 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2360 def test_POST_mkdir_no_replace_field(self): # return value?
2361 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2363 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2365 "There was already a child by that name, and you asked me "
2366 "to not replace it")
2367 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2368 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2371 def test_POST_mkdir_whendone_field(self):
2372 d = self.POST(self.public_url + "/foo",
2373 t="mkdir", name="newdir", when_done="/THERE")
2374 d.addBoth(self.shouldRedirect, "/THERE")
2375 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2376 d.addCallback(self.failUnlessNodeKeysAre, [])
2379 def test_POST_mkdir_whendone_queryarg(self):
2380 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2381 t="mkdir", name="newdir")
2382 d.addBoth(self.shouldRedirect, "/THERE")
2383 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2384 d.addCallback(self.failUnlessNodeKeysAre, [])
2387 def test_POST_bad_t(self):
2388 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2389 "POST to a directory with bad t=BOGUS",
2390 self.POST, self.public_url + "/foo", t="BOGUS")
2393 def test_POST_set_children(self, command_name="set_children"):
2394 contents9, n9, newuri9 = self.makefile(9)
2395 contents10, n10, newuri10 = self.makefile(10)
2396 contents11, n11, newuri11 = self.makefile(11)
2399 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2402 "ctime": 1002777696.7564139,
2403 "mtime": 1002777696.7564139
2406 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2409 "ctime": 1002777696.7564139,
2410 "mtime": 1002777696.7564139
2413 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2416 "ctime": 1002777696.7564139,
2417 "mtime": 1002777696.7564139
2420 }""" % (newuri9, newuri10, newuri11)
2422 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2424 d = client.getPage(url, method="POST", postdata=reqbody)
2426 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2427 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2428 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2430 d.addCallback(_then)
2431 d.addErrback(self.dump_error)
2434 def test_POST_set_children_with_hyphen(self):
2435 return self.test_POST_set_children(command_name="set-children")
2437 def test_POST_link_uri(self):
2438 contents, n, newuri = self.makefile(8)
2439 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2440 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2441 d.addCallback(lambda res:
2442 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2446 def test_POST_link_uri_replace(self):
2447 contents, n, newuri = self.makefile(8)
2448 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2449 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2450 d.addCallback(lambda res:
2451 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2455 def test_POST_link_uri_unknown_bad(self):
2456 newuri = "lafs://from_the_future"
2457 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=newuri)
2458 d.addBoth(self.shouldFail, error.Error,
2459 "POST_link_uri_unknown_bad",
2461 "unknown cap in a write slot")
2464 def test_POST_link_uri_unknown_ro_good(self):
2465 newuri = "ro.lafs://readonly_from_the_future"
2466 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=newuri)
2467 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
2470 def test_POST_link_uri_unknown_imm_good(self):
2471 newuri = "imm.lafs://immutable_from_the_future"
2472 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=newuri)
2473 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
2476 def test_POST_link_uri_no_replace_queryarg(self):
2477 contents, n, newuri = self.makefile(8)
2478 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2479 name="bar.txt", uri=newuri)
2480 d.addBoth(self.shouldFail, error.Error,
2481 "POST_link_uri_no_replace_queryarg",
2483 "There was already a child by that name, and you asked me "
2484 "to not replace it")
2485 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2486 d.addCallback(self.failUnlessIsBarDotTxt)
2489 def test_POST_link_uri_no_replace_field(self):
2490 contents, n, newuri = self.makefile(8)
2491 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2492 name="bar.txt", uri=newuri)
2493 d.addBoth(self.shouldFail, error.Error,
2494 "POST_link_uri_no_replace_field",
2496 "There was already a child by that name, and you asked me "
2497 "to not replace it")
2498 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2499 d.addCallback(self.failUnlessIsBarDotTxt)
2502 def test_POST_delete(self):
2503 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2504 d.addCallback(lambda res: self._foo_node.list())
2505 def _check(children):
2506 self.failIf(u"bar.txt" in children)
2507 d.addCallback(_check)
2510 def test_POST_rename_file(self):
2511 d = self.POST(self.public_url + "/foo", t="rename",
2512 from_name="bar.txt", to_name='wibble.txt')
2513 d.addCallback(lambda res:
2514 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2515 d.addCallback(lambda res:
2516 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2517 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2518 d.addCallback(self.failUnlessIsBarDotTxt)
2519 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2520 d.addCallback(self.failUnlessIsBarJSON)
2523 def test_POST_rename_file_redundant(self):
2524 d = self.POST(self.public_url + "/foo", t="rename",
2525 from_name="bar.txt", to_name='bar.txt')
2526 d.addCallback(lambda res:
2527 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2528 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2529 d.addCallback(self.failUnlessIsBarDotTxt)
2530 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2531 d.addCallback(self.failUnlessIsBarJSON)
2534 def test_POST_rename_file_replace(self):
2535 # rename a file and replace a directory with it
2536 d = self.POST(self.public_url + "/foo", t="rename",
2537 from_name="bar.txt", to_name='empty')
2538 d.addCallback(lambda res:
2539 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2540 d.addCallback(lambda res:
2541 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2542 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2543 d.addCallback(self.failUnlessIsBarDotTxt)
2544 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2545 d.addCallback(self.failUnlessIsBarJSON)
2548 def test_POST_rename_file_no_replace_queryarg(self):
2549 # rename a file and replace a directory with it
2550 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2551 from_name="bar.txt", to_name='empty')
2552 d.addBoth(self.shouldFail, error.Error,
2553 "POST_rename_file_no_replace_queryarg",
2555 "There was already a child by that name, and you asked me "
2556 "to not replace it")
2557 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2558 d.addCallback(self.failUnlessIsEmptyJSON)
2561 def test_POST_rename_file_no_replace_field(self):
2562 # rename a file and replace a directory with it
2563 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2564 from_name="bar.txt", to_name='empty')
2565 d.addBoth(self.shouldFail, error.Error,
2566 "POST_rename_file_no_replace_field",
2568 "There was already a child by that name, and you asked me "
2569 "to not replace it")
2570 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2571 d.addCallback(self.failUnlessIsEmptyJSON)
2574 def failUnlessIsEmptyJSON(self, res):
2575 data = simplejson.loads(res)
2576 self.failUnlessEqual(data[0], "dirnode", data)
2577 self.failUnlessEqual(len(data[1]["children"]), 0)
2579 def test_POST_rename_file_slash_fail(self):
2580 d = self.POST(self.public_url + "/foo", t="rename",
2581 from_name="bar.txt", to_name='kirk/spock.txt')
2582 d.addBoth(self.shouldFail, error.Error,
2583 "test_POST_rename_file_slash_fail",
2585 "to_name= may not contain a slash",
2587 d.addCallback(lambda res:
2588 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2591 def test_POST_rename_dir(self):
2592 d = self.POST(self.public_url, t="rename",
2593 from_name="foo", to_name='plunk')
2594 d.addCallback(lambda res:
2595 self.failIfNodeHasChild(self.public_root, u"foo"))
2596 d.addCallback(lambda res:
2597 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2598 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2599 d.addCallback(self.failUnlessIsFooJSON)
2602 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2603 """ If target is not None then the redirection has to go to target. If
2604 statuscode is not None then the redirection has to be accomplished with
2605 that HTTP status code."""
2606 if not isinstance(res, failure.Failure):
2607 to_where = (target is None) and "somewhere" or ("to " + target)
2608 self.fail("%s: we were expecting to get redirected %s, not get an"
2609 " actual page: %s" % (which, to_where, res))
2610 res.trap(error.PageRedirect)
2611 if statuscode is not None:
2612 self.failUnlessEqual(res.value.status, statuscode,
2613 "%s: not a redirect" % which)
2614 if target is not None:
2615 # the PageRedirect does not seem to capture the uri= query arg
2616 # properly, so we can't check for it.
2617 realtarget = self.webish_url + target
2618 self.failUnlessEqual(res.value.location, realtarget,
2619 "%s: wrong target" % which)
2620 return res.value.location
2622 def test_GET_URI_form(self):
2623 base = "/uri?uri=%s" % self._bar_txt_uri
2624 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2625 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2627 d.addBoth(self.shouldRedirect, targetbase)
2628 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2629 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2630 d.addCallback(lambda res: self.GET(base+"&t=json"))
2631 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2632 d.addCallback(self.log, "about to get file by uri")
2633 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2634 d.addCallback(self.failUnlessIsBarDotTxt)
2635 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2636 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2637 followRedirect=True))
2638 d.addCallback(self.failUnlessIsFooJSON)
2639 d.addCallback(self.log, "got dir by uri")
2643 def test_GET_URI_form_bad(self):
2644 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2645 "400 Bad Request", "GET /uri requires uri=",
2649 def test_GET_rename_form(self):
2650 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2651 followRedirect=True)
2653 self.failUnless('name="when_done" value="."' in res, res)
2654 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2655 d.addCallback(_check)
2658 def log(self, res, msg):
2659 #print "MSG: %s RES: %s" % (msg, res)
2663 def test_GET_URI_URL(self):
2664 base = "/uri/%s" % self._bar_txt_uri
2666 d.addCallback(self.failUnlessIsBarDotTxt)
2667 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2668 d.addCallback(self.failUnlessIsBarDotTxt)
2669 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2670 d.addCallback(self.failUnlessIsBarDotTxt)
2673 def test_GET_URI_URL_dir(self):
2674 base = "/uri/%s?t=json" % self._foo_uri
2676 d.addCallback(self.failUnlessIsFooJSON)
2679 def test_GET_URI_URL_missing(self):
2680 base = "/uri/%s" % self._bad_file_uri
2681 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2682 http.GONE, None, "NotEnoughSharesError",
2684 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2685 # here? we must arrange for a download to fail after target.open()
2686 # has been called, and then inspect the response to see that it is
2687 # shorter than we expected.
2690 def test_PUT_DIRURL_uri(self):
2691 d = self.s.create_dirnode()
2693 new_uri = dn.get_uri()
2694 # replace /foo with a new (empty) directory
2695 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2696 d.addCallback(lambda res:
2697 self.failUnlessEqual(res.strip(), new_uri))
2698 d.addCallback(lambda res:
2699 self.failUnlessRWChildURIIs(self.public_root,
2703 d.addCallback(_made_dir)
2706 def test_PUT_DIRURL_uri_noreplace(self):
2707 d = self.s.create_dirnode()
2709 new_uri = dn.get_uri()
2710 # replace /foo with a new (empty) directory, but ask that
2711 # replace=false, so it should fail
2712 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2713 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2715 self.public_url + "/foo?t=uri&replace=false",
2717 d.addCallback(lambda res:
2718 self.failUnlessRWChildURIIs(self.public_root,
2722 d.addCallback(_made_dir)
    def test_PUT_DIRURL_bad_t(self):
        # An unrecognized t= value on a PUT to a directory must be
        # rejected with 400 Bad Request, and /foo must be unchanged.
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                             "400 Bad Request", "PUT to a directory",
                             self.PUT, self.public_url + "/foo?t=BOGUS", "")
        d.addCallback(lambda res:
                      self.failUnlessRWChildURIIs(self.public_root,
    def test_PUT_NEWFILEURL_uri(self):
        # PUT ?t=uri should attach an existing filecap as a new child
        # and echo the cap back in the response body.
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
    def test_PUT_NEWFILEURL_uri_replace(self):
        # PUT ?t=uri over an existing child (bar.txt) should replace it
        # by default and echo the new cap back.
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
    def test_PUT_NEWFILEURL_uri_no_replace(self):
        # With replace=false, attaching a cap over an existing child
        # must fail (the error assertions below check the message text).
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
    def test_PUT_NEWFILEURL_uri_unknown_bad(self):
        # A bare unknown (future-format) cap must be rejected from a
        # write slot.
        new_uri = "lafs://from_the_future"
        d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", new_uri)
        d.addBoth(self.shouldFail, error.Error,
                  "POST_put_uri_unknown_bad",
                  "unknown cap in a write slot")
    def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
        # An unknown cap marked read-only ("ro." prefix) IS acceptable,
        # and should appear as a read-only child.
        new_uri = "ro.lafs://readonly_from_the_future"
        d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", new_uri)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
                      u"put-future-ro.txt")
    def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
        # An unknown cap marked immutable ("imm." prefix) is likewise
        # acceptable as a read-only child.
        new_uri = "imm.lafs://immutable_from_the_future"
        d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", new_uri)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
                      u"put-future-imm.txt")
    def test_PUT_NEWFILE_URI(self):
        # PUT /uri with raw file bytes should upload an (immutable, per
        # the FakeCHKFileNode check) file and return its cap; a GET on
        # that cap must return the same bytes.
        file_contents = "New file contents here\n"
        d = self.PUT("/uri", file_contents)
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_NEWFILE_URI_not_mutable(self):
        # Same as test_PUT_NEWFILE_URI but with an explicit
        # mutable=false query arg: result is still a CHK (immutable) cap.
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=false", file_contents)
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_NEWFILE_URI_only_PUT(self):
        # /uri must reject unknown t= values, and the error message
        # enumerates the operations it does accept.
        d = self.PUT("/uri?t=bogus", "")
        d.addBoth(self.shouldFail, error.Error,
                  "PUT_NEWFILE_URI_only_PUT",
                  "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
    def test_PUT_NEWFILE_URI_mutable(self):
        # PUT /uri?mutable=true creates a mutable (URI:SSK:) file; the
        # contents must round-trip via both the node API
        # (download_best_version) and a plain GET of the cap.
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=true", file_contents)
        def _check1(filecap):
            filecap = filecap.strip()
            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
            # stash for the later GET
            self.filecap = filecap
            u = uri.WriteableSSKFileURI.init_from_string(filecap)
            self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(filecap)
            return n.download_best_version()
        d.addCallback(_check1)
            self.failUnlessEqual(data, file_contents)
            return self.GET("/uri/%s" % urllib.quote(self.filecap))
        d.addCallback(_check2)
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check3)
    def test_PUT_mkdir(self):
        # PUT /uri?t=mkdir creates a new empty directory and returns its
        # cap; t=json on that cap must show an empty dirnode.
        d = self.PUT("/uri?t=mkdir", "")
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, [])
            d2.addCallback(lambda res:
                           self.GET("/uri/%s?t=json" % uri))
        d.addCallback(_check)
        d.addCallback(self.failUnlessIsEmptyJSON)
    def test_POST_check(self):
        # POST t=check on a child name should run a file check and
        # return a rendered result.
        d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
            # this returns a string form of the results, which are probably
            # None since we're using fake filenodes.
            # TODO: verify that the check actually happened, by changing
            # FakeCHKFileNode to count how many times .check() has been
        d.addCallback(_done)
    def test_bad_method(self):
        # An unsupported HTTP method must yield 501 Not Implemented.
        url = self.webish_url + self.public_url + "/foo/bar.txt"
        d = self.shouldHTTPError("test_bad_method",
                                 501, "Not Implemented",
                                 "I don't know how to treat a BOGUS request.",
                                 client.getPage, url, method="BOGUS")
    def test_short_url(self):
        # DELETE on the bare /uri root is not supported: 501.
        url = self.webish_url + "/uri"
        d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                                 "I don't know how to treat a DELETE request.",
                                 client.getPage, url, method="DELETE")
    def test_ophandle_bad(self):
        # Polling a nonexistent operation handle must return 404.
        url = self.webish_url + "/operations/bogus?t=status"
        d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                                 "unknown/expired handle 'bogus'",
                                 client.getPage, url)
    def test_ophandle_cancel(self):
        # Start a manifest operation under ophandle=128, then t=cancel it:
        # the monitor must report cancelled and the handle must be
        # forgotten, so a later status poll gets 404.
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/128?t=status&output=JSON"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            # grab the live monitor so we can inspect it after the cancel
            monitor = self.ws.root.child_operations.handles["128"][0]
            d = self.POST("/operations/128?t=cancel&output=JSON")
                data = simplejson.loads(res)
                self.failUnless("finished" in data, res)
                # t=cancel causes the handle to be forgotten
                self.failUnless(monitor.is_cancelled())
            d.addCallback(_check2)
        d.addCallback(_check1)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_cancel",
                                           404, "404 Not Found",
                                           "unknown/expired handle '128'",
                                           "/operations/128?t=status&output=JSON"))
    def test_ophandle_retainfor(self):
        # A status poll with retain-for=0 shortens the handle's lifetime;
        # after advancing the fake clock, the next poll must 404.
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # the retain-for=0 will cause the handle to be expired very soon
        d.addCallback(lambda ign:
                      self.clock.advance(2.0))
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_retainfor",
                                           404, "404 Not Found",
                                           "unknown/expired handle '129'",
                                           "/operations/129?t=status&output=JSON"))
    def test_ophandle_release_after_complete(self):
        # Once the operation finishes, a status poll carrying
        # release-after-complete=true frees the handle immediately:
        # the next poll must 404.
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "130")
        d.addCallback(lambda ignored:
                      self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
        # the release-after-complete=true will cause the handle to be expired
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_release_after_complete",
                                           404, "404 Not Found",
                                           "unknown/expired handle '130'",
                                           "/operations/130?t=status&output=JSON"))
    def test_uncollected_ophandle_expiration(self):
        # uncollected ophandles should expire after 4 days
        def _make_uncollected_ophandle(ophandle):
            # Start a manifest operation but deliberately do NOT follow
            # the redirect, so the handle stays "uncollected".
            d = self.POST(self.public_url +
                          "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                          followRedirect=False)
            # When we start the operation, the webapi server will want
            # to redirect us to the page for the ophandle, so we get
            # confirmation that the operation has started. If the
            # manifest operation has finished by the time we get there,
            # following that redirect (by setting followRedirect=True
            # above) has the side effect of collecting the ophandle that
            # we've just created, which means that we can't use the
            # ophandle to test the uncollected timeout anymore. So,
            # instead, catch the 302 here and don't follow it.
            d.addBoth(self.should302, "uncollected_ophandle_creation")
        # Create an ophandle, don't collect it, then advance the clock by
        # 4 days - 1 second and make sure that the ophandle is still there.
        d = _make_uncollected_ophandle(131)
        d.addCallback(lambda ign:
                      self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
        d.addCallback(lambda ign:
                      self.GET("/operations/131?t=status&output=JSON"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # Create an ophandle, don't collect it, then try to collect it
        # after 4 days. It should be gone.
        d.addCallback(lambda ign:
                      _make_uncollected_ophandle(132))
        d.addCallback(lambda ign:
                      self.clock.advance(96*60*60))
        d.addCallback(lambda ign:
                      self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
                                           404, "404 Not Found",
                                           "unknown/expired handle '132'",
                                           "/operations/132?t=status&output=JSON"))
    def test_collected_ophandle_expiration(self):
        # collected ophandles should expire after 1 day
        def _make_collected_ophandle(ophandle):
            d = self.POST(self.public_url +
                          "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                          followRedirect=True)
            # By following the initial redirect, we collect the ophandle
            # we've just created.
        # Create a collected ophandle, then collect it after 23 hours
        # and 59 seconds to make sure that it is still there.
        d = _make_collected_ophandle(133)
        d.addCallback(lambda ign:
                      self.clock.advance((24*60*60) - 1))
        d.addCallback(lambda ign:
                      self.GET("/operations/133?t=status&output=JSON"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # Create another uncollected ophandle, then try to collect it
        # after 24 hours to make sure that it is gone.
        d.addCallback(lambda ign:
                      _make_collected_ophandle(134))
        d.addCallback(lambda ign:
                      self.clock.advance(24*60*60))
        d.addCallback(lambda ign:
                      self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
                                           404, "404 Not Found",
                                           "unknown/expired handle '134'",
                                           "/operations/134?t=status&output=JSON"))
    def test_incident(self):
        # POST /report_incident should record the report and respond
        # with an acknowledgement page.
        d = self.POST("/report_incident", details="eek")
            self.failUnless("Thank you for your report!" in res, res)
        d.addCallback(_done)
    def test_static(self):
        # Files placed under the node's static directory must be served
        # below the /static URL prefix.
        webdir = os.path.join(self.staticdir, "subdir")
        fileutil.make_dirs(webdir)
        f = open(os.path.join(webdir, "hello.txt"), "wb")
        d = self.GET("/static/subdir/hello.txt")
            self.failUnlessEqual(res, "hello")
        d.addCallback(_check)
class Util(unittest.TestCase, ShouldFailMixin):
    """Unit tests for the small parsing/formatting helpers used by the
    web UI (allmydata.web.common and allmydata.web.status)."""
    def test_parse_replace_arg(self):
        self.failUnlessEqual(common.parse_replace_arg("true"), True)
        self.failUnlessEqual(common.parse_replace_arg("false"), False)
        self.failUnlessEqual(common.parse_replace_arg("only-files"),
        # a misspelled value must be rejected outright
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")
    def test_abbreviate_time(self):
        # None renders as empty; otherwise scaled to s/ms/us with
        # 2-3 significant figures.
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
    def test_abbreviate_rate(self):
        # rates use decimal (SI) prefixes: Bps/kBps/MBps
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
    def test_abbreviate_size(self):
        # sizes also use decimal prefixes: B/kB/MB/GB
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")
    def test_plural(self):
        # status.plural(n) yields "" for n==1 and "s" otherwise; it also
        # accepts a sequence, keying off its length.
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
3090 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
3092 def CHECK(self, ign, which, args, clientnum=0):
3093 fileurl = self.fileurls[which]
3094 url = fileurl + "?" + args
3095 return self.GET(url, method="POST", clientnum=clientnum)
    def test_filecheck(self):
        """Exercise t=check (HTML and JSON output) against healthy, sick,
        dead, corrupt, literal, and small-directory files."""
        self.basedir = "web/Grid/filecheck"
        c0 = self.g.clients[0]
        # upload several files whose share-health we will then manipulate
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data("literal", convergence="")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
        d.addCallback(_stash_mutable_uri, "smalldir")
        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        def _clobber_shares(ignored):
            # "sick": delete one share; "dead": delete all but one;
            # "corrupt": flip bits in one share via the debug tool.
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "good", "t=check")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
        def _got_html_good_return_to(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        d.addCallback(_got_html_good_return_to)
        d.addCallback(self.CHECK, "good", "t=check&output=json")
        def _got_json_good(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"], "Healthy")
            self.failUnless(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_good)
        d.addCallback(self.CHECK, "small", "t=check")
        def _got_html_small(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_small)
        d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
        def _got_html_small_return_to(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        d.addCallback(_got_html_small_return_to)
        d.addCallback(self.CHECK, "small", "t=check&output=json")
        def _got_json_small(res):
            # literal files have no storage index
            r = simplejson.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_small)
        d.addCallback(self.CHECK, "smalldir", "t=check")
        def _got_html_smalldir(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_smalldir)
        d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
        def _got_json_smalldir(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_smalldir)
        d.addCallback(self.CHECK, "sick", "t=check")
        def _got_html_sick(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_sick)
        d.addCallback(self.CHECK, "sick", "t=check&output=json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_sick)
        d.addCallback(self.CHECK, "dead", "t=check")
        def _got_html_dead(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_dead)
        d.addCallback(self.CHECK, "dead", "t=check&output=json")
        def _got_json_dead(res):
            # only 1 of 3 needed shares left: unrecoverable
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 1 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failIf(r["results"]["recoverable"])
        d.addCallback(_got_json_dead)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
        def _got_html_corrupt(res):
            self.failUnless("Not Healthy! : Unhealthy" in res, res)
        d.addCallback(_got_html_corrupt)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
        def _got_json_corrupt(res):
            r = simplejson.loads(res)
            self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
            self.failIf(r["results"]["healthy"])
            self.failUnless(r["results"]["recoverable"])
            self.failUnlessEqual(r["results"]["count-shares-good"], 9)
            self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
        d.addCallback(_got_json_corrupt)
        d.addErrback(self.explain_web_error)
    def test_repair_html(self):
        """Exercise t=check&repair=true (HTML output) against healthy,
        sick, and corrupt files; repair must restore the latter two."""
        self.basedir = "web/Grid/repair_html"
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")
        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        def _clobber_shares(ignored):
            # damage the shares the same way test_filecheck does
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "good", "t=check&repair=true")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("No repair necessary" in res, res)
        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "sick", "t=check&repair=true")
        def _got_html_sick(res):
            self.failUnless("Healthy : healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_sick)
        # repair of a dead file will fail, of course, but it isn't yet
        # clear how this should be reported. Right now it shows up as
        #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
        #def _got_html_dead(res):
        #    self.failUnless("Healthy : healthy" in res, res)
        #    self.failIf("Not Healthy" in res, res)
        #    self.failUnless("No repair necessary" in res, res)
        #d.addCallback(_got_html_dead)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
        def _got_html_corrupt(res):
            self.failUnless("Healthy : Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_corrupt)
        d.addErrback(self.explain_web_error)
    def test_repair_json(self):
        """t=check&repair=true&output=json on a sick file must report a
        successful repair and healthy post-repair results."""
        self.basedir = "web/Grid/repair_json"
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA+"1", convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "sick")
        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        def _clobber_shares(ignored):
            # delete a single share to make the file "sick" (9 of 10 left)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["repair-attempted"], True)
            self.failUnlessEqual(r["repair-successful"], True)
            self.failUnlessEqual(r["pre-repair-results"]["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["pre-repair-results"]["results"]["healthy"])
            self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
            self.failUnless(r["post-repair-results"]["results"]["healthy"])
        d.addCallback(_got_json_sick)
        d.addErrback(self.explain_web_error)
    def test_unknown(self, immutable=False):
        """A directory containing an unknown (future-format) cap must
        still render as HTML, JSON, and t=info, in both read-write and
        read-only views. With immutable=True the child is a
        deep-immutable UnknownNode inside an immutable dirnode."""
        self.basedir = "web/Grid/unknown"
            self.basedir = "web/Grid/unknown-immutable"
        c0 = self.g.clients[0]
        future_write_uri = "x-tahoe-crazy://I_am_from_the_future."
        future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future."
        # the future cap format may contain slashes, which must be tolerated
        expected_info_url = "uri/%s?t=info" % urllib.quote(future_write_uri,
            name = u"future-imm"
            future_node = UnknownNode(None, future_read_uri, deep_immutable=True)
            d = c0.create_immutable_dirnode({name: (future_node, {})})
            future_node = UnknownNode(future_write_uri, future_read_uri)
            d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
            return self.rootnode.set_node(name, future_node)
        d.addCallback(_stash_root_and_create_file)
        # make sure directory listing tolerates unknown nodes
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_directory_html(res, expected_type_suffix):
            # the child shows up as type "?", "?-IMM", or "?-RO"
            pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
                                 '<td>%s</td>' % (expected_type_suffix, str(name)),
            self.failUnless(re.search(pattern, res), res)
            # find the More Info link for name, should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnlessEqual(info_url, "%s?t=info" % (str(name),))
            d.addCallback(_check_directory_html, "-IMM")
            d.addCallback(_check_directory_html, "")
        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_directory_json(res, expect_rw_uri):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            f = data[1]["children"][name]
            self.failUnlessEqual(f[0], "unknown")
                self.failUnlessEqual(f[1]["rw_uri"], future_write_uri)
                self.failIfIn("rw_uri", f[1])
                self.failUnlessEqual(f[1]["ro_uri"], "imm." + future_read_uri)
                self.failUnlessEqual(f[1]["ro_uri"], "ro." + future_read_uri)
            self.failUnless("metadata" in f[1])
        d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
        def _check_info(res, expect_rw_uri, expect_ro_uri):
            # the t=info page for an unknown node must omit every
            # operation panel that assumes a known cap type
            self.failUnlessIn("Object Type: <span>unknown</span>", res)
                self.failUnlessIn(future_write_uri, res)
                self.failUnlessIn(future_read_uri, res)
                self.failIfIn(future_read_uri, res)
            self.failIfIn("Raw data as", res)
            self.failIfIn("Directory writecap", res)
            self.failIfIn("Checker Operations", res)
            self.failIfIn("Mutable File Operations", res)
            self.failIfIn("Directory Operations", res)
        # FIXME: these should have expect_rw_uri=not immutable; I don't know
        # why they fail. Possibly related to ticket #922.
        d.addCallback(lambda ign: self.GET(expected_info_url))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
        d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
        def _check_json(res, expect_rw_uri):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "unknown")
                self.failUnlessEqual(data[1]["rw_uri"], future_write_uri)
                self.failIfIn("rw_uri", data[1])
                self.failUnlessEqual(data[1]["ro_uri"], "imm." + future_read_uri)
                self.failUnlessEqual(data[1]["mutable"], False)
                self.failUnlessEqual(data[1]["ro_uri"], "ro." + future_read_uri)
                self.failUnlessEqual(data[1]["mutable"], True)
                self.failUnlessEqual(data[1]["ro_uri"], "ro." + future_read_uri)
                self.failIf("mutable" in data[1], data[1])
            # TODO: check metadata contents
            self.failUnless("metadata" in data[1])
        d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=not immutable)
        # and make sure that a read-only version of the directory can be
        # rendered too. This version will not have future_write_uri, whether
        # or not future_node was immutable.
        d.addCallback(lambda ign: self.GET(self.rourl))
            d.addCallback(_check_directory_html, "-IMM")
            d.addCallback(_check_directory_html, "-RO")
        d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
        d.addCallback(_check_directory_json, expect_rw_uri=False)
        d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=False)
        # TODO: check that getting t=info from the Info link in the ro directory
        # works, and does not include the writecap URI.
3490 def test_immutable_unknown(self):
3491 return self.test_unknown(immutable=True)
    def test_mutant_dirnodes_are_omitted(self):
        """An immutable directory holding "mutant" children (mutable caps
        smuggled into an immutable dir) must silently omit them from
        listings, while the raw netstring encoding still carries all
        three children."""
        self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
        c = self.g.clients[0]
        lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
        mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
        mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
        # This method tests mainly dirnode, but we'd have to duplicate code in order to
        # test the dirnode and web layers separately.
        # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
        # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
        # When the directory is read, the mutants should be silently disposed of, leaving
        # their lonely sibling.
        # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
        # because immutable directories don't have a writecap and therefore that field
        # isn't (and can't be) decrypted.
        # TODO: The field still exists in the netstring. Technically we should check what
        # happens if something is put there (_unpack_contents should raise ValueError),
        # but that can wait.
        lonely_child = nm.create_from_cap(lonely_uri)
        mutant_ro_child = nm.create_from_cap(mut_read_uri)
        mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
        def _by_hook_or_by_crook():
        # force the mutant children past the is_allowed_in_immutable_directory
        # guard so they actually land in the packed directory
        for n in [mutant_ro_child, mutant_write_in_ro_child]:
            n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
        mutant_write_in_ro_child.get_write_uri = lambda: None
        mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
        kids = {u"lonely": (lonely_child, {}),
                u"ro": (mutant_ro_child, {}),
                u"write-in-ro": (mutant_write_in_ro_child, {}),
        d = c.create_immutable_dirnode(kids)
            self.failUnless(isinstance(dn, dirnode.DirectoryNode))
            self.failIf(dn.is_mutable())
            self.failUnless(dn.is_readonly())
            # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
            self.failIf(hasattr(dn._node, 'get_writekey'))
            self.failUnless("RO-IMM" in rep)
            self.failUnlessIn("CHK", cap.to_string())
            self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
            return download_to_data(dn._node)
        d.addCallback(_created)
        def _check_data(data):
            # Decode the netstring representation of the directory to check that all children
            # are present. This is a bit of an abstraction violation, but there's not really
            # any other way to do it given that the real DirectoryNode._unpack_contents would
            # strip the mutant children out (which is what we're trying to test, later).
            while position < len(data):
                entries, position = split_netstring(data, 1, position)
                (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
                name = name_utf8.decode("utf-8")
                self.failUnless(rwcapdata == "")
                self.failUnless(name in kids)
                (expected_child, ign) = kids[name]
                self.failUnlessEqual(ro_uri, expected_child.get_readonly_uri())
            # all three children (including the mutants) are in the raw encoding
            self.failUnlessEqual(numkids, 3)
            return self.rootnode.list()
        d.addCallback(_check_data)
        # Now when we use the real directory listing code, the mutants should be absent.
        def _check_kids(children):
            self.failUnlessEqual(sorted(children.keys()), [u"lonely"])
            lonely_node, lonely_metadata = children[u"lonely"]
            self.failUnlessEqual(lonely_node.get_write_uri(), None)
            self.failUnlessEqual(lonely_node.get_readonly_uri(), lonely_uri)
        d.addCallback(_check_kids)
        d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
        d.addCallback(lambda n: n.list())
        d.addCallback(_check_kids) # again with dirnode recreated from cap
        # Make sure the lonely child can be listed in HTML...
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_html(res):
            self.failIfIn("URI:SSK", res)
            get_lonely = "".join([r'<td>FILE</td>',
                                  r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
                                  r'\s+<td>%d</td>' % len("one"),
            self.failUnless(re.search(get_lonely, res), res)
            # find the More Info link for name, should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
        d.addCallback(_check_html)
        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_json(res):
            # ...and in JSON, again with only the lonely child visible
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            listed_children = data[1]["children"]
            self.failUnlessEqual(sorted(listed_children.keys()), [u"lonely"])
            ll_type, ll_data = listed_children[u"lonely"]
            self.failUnlessEqual(ll_type, "filenode")
            self.failIf("rw_uri" in ll_data)
            self.failUnlessEqual(ll_data["ro_uri"], lonely_uri)
        d.addCallback(_check_json)
def test_deep_check(self):
    """Drive t=stream-deep-check and t=stream-manifest over a directory
    holding a healthy CHK file, a literal file, a damaged ("sick") file and
    an unknown-URI child, then add an unrecoverable subdirectory and verify
    that the streaming APIs report the failure via ERROR: lines.

    NOTE(review): this copy of the file is missing a number of lines
    (grid setup, dict initialization, some 'def' headers and several
    ')'-continuation lines); each gap is flagged inline and must be
    restored from history before this test is runnable.
    """
    self.basedir = "web/Grid/deep_check"
    # [source gap: grid setup and self.uris/self.fileurls/DATA init missing]
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        # [source gap: 'self.rootnode = n' missing -- later callbacks use it]
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        # remember each child's URI so later steps can locate its shares
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    # [source gap: 'convergence=""' closing line missing]
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    # [source gap: 'convergence=""' closing line missing]
    d.addCallback(_stash_uri, "sick")

    # this tests that deep-check and stream-manifest will ignore
    # UnknownNode instances. Hopefully this will also cover deep-stats.
    future_write_uri = "x-tahoe-crazy://I_am_from_the_future."
    future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future."
    future_node = UnknownNode(future_write_uri, future_read_uri)
    d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))

    def _clobber_shares(ignored):
        # damage (but do not destroy) the "sick" file
        self.delete_shares_numbered(self.uris["sick"], [0,1])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    # [source gap: 'def _done(res):' and 'try:' headers missing around the
    #  following block, which parses one JSON unit per output line]
    units = [simplejson.loads(line)
             for line in res.splitlines()
    # [source gap: 'if line]' and the 'except ValueError:' handler missing]
    print "response is:", res
    print "undecodeable line was '%s'" % line
    self.failUnlessEqual(len(units), 5+1)
    # should be parent-first
    # [source gap: 'u0 = units[0]' missing]
    self.failUnlessEqual(u0["path"], [])
    self.failUnlessEqual(u0["type"], "directory")
    self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
    u0cr = u0["check-results"]
    self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)

    ugood = [u for u in units
             if u["type"] == "file" and u["path"] == [u"good"]][0]
    self.failUnlessEqual(ugood["cap"], self.uris["good"])
    ugoodcr = ugood["check-results"]
    self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)

    # [source gap: 'stats = units[-1]' and 's = stats["stats"]' missing]
    self.failUnlessEqual(stats["type"], "stats")
    self.failUnlessEqual(s["count-immutable-files"], 2)
    self.failUnlessEqual(s["count-literal-files"], 1)
    self.failUnlessEqual(s["count-directories"], 1)
    self.failUnlessEqual(s["count-unknown"], 1)
    d.addCallback(_done)

    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_manifest(res):
        # stream-manifest emits newline-terminated JSON units, stats last
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessEqual(len(units), 5+1)
        self.failUnlessEqual(units[-1]["type"], "stats")
        # [source gap: 'first = units[0]' missing]
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
        self.failUnlessEqual(first["type"], "directory")
        stats = units[-1]["stats"]
        self.failUnlessEqual(stats["count-immutable-files"], 2)
        self.failUnlessEqual(stats["count-literal-files"], 1)
        self.failUnlessEqual(stats["count-mutable-files"], 0)
        self.failUnlessEqual(stats["count-immutable-files"], 2)
        self.failUnlessEqual(stats["count-unknown"], 1)
    d.addCallback(_check_manifest)

    # now add root/subdir and root/subdir/grandchild, then make subdir
    # unrecoverable, then see what happens

    d.addCallback(lambda ign:
                  self.rootnode.create_subdirectory(u"subdir"))
    d.addCallback(_stash_uri, "subdir")
    d.addCallback(lambda subdir_node:
                  subdir_node.add_file(u"grandchild",
                                       upload.Data(DATA+"2",
    # [source gap: 'convergence=""' closing line missing]
    d.addCallback(_stash_uri, "grandchild")

    d.addCallback(lambda ign:
                  self.delete_shares_numbered(self.uris["subdir"],
    # [source gap: the share-number list and closing parens missing]

    # root/subdir [unrecoverable]
    # root/subdir/grandchild

    # how should a streaming-JSON API indicate fatal error?
    # answer: emit ERROR: instead of a JSON string

    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_broken_manifest(res):
        lines = res.splitlines()
        # [source gap: 'error_lines = [i' head of this comprehension missing]
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        # [source gap: 'if not error_lines:' guard missing]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        # [source gap: 'error_line)' continuation missing]
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
    d.addCallback(_check_broken_manifest)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    def _check_broken_deepcheck(res):
        lines = res.splitlines()
        # [source gap: 'error_lines = [i' head of this comprehension missing]
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        # [source gap: 'if not error_lines:' guard missing]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        # [source gap: 'error_line)' continuation missing]
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
        r = last_unit["check-results"]["results"]
        self.failUnlessEqual(r["count-recoverable-versions"], 0)
        self.failUnlessEqual(r["count-shares-good"], 1)
        self.failUnlessEqual(r["recoverable"], False)
    d.addCallback(_check_broken_deepcheck)

    d.addErrback(self.explain_web_error)
    # [source gap: trailing 'return d' presumably missing -- confirm]
def test_deep_check_and_repair(self):
    """Like test_deep_check, but with repair=true: the "sick" file (one
    share unlinked, 9 of 10 remaining) should be repaired back to full
    health during the deep-check pass.

    NOTE(review): this copy is missing several lines (setup, callback
    headers, continuation lines); each gap is flagged inline.
    """
    self.basedir = "web/Grid/deep_check_and_repair"
    # [source gap: grid setup and self.uris/self.fileurls/DATA init missing]
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        # [source gap: 'self.rootnode = n' missing -- later callbacks use it]
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    # [source gap: 'convergence=""' closing line missing]
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    # [source gap: 'convergence=""' closing line missing]
    d.addCallback(_stash_uri, "sick")
    #d.addCallback(lambda ign:
    #              self.rootnode.add_file(u"dead",
    #                                     upload.Data(DATA+"2",
    #d.addCallback(_stash_uri, "dead")

    #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
    #d.addCallback(_stash_uri, "corrupt")

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        # element [2] is the share filename (see _count_leases below)
        os.unlink(sick_shares[0][2])
        #dead_shares = self.find_shares(self.uris["dead"])
        #for i in range(1, 10):
        #    os.unlink(dead_shares[i][2])

        #c_shares = self.find_shares(self.uris["corrupt"])
        #cso = CorruptShareOptions()
        #cso.stdout = StringIO()
        #cso.parseOptions([c_shares[0][2]])
    d.addCallback(_clobber_shares)

    # root/good CHK, 10 shares
    # root/sick CHK, 9 shares

    d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
    # [source gap: 'def _done(res):' header missing around the following]
    units = [simplejson.loads(line)
             for line in res.splitlines()
    # [source gap: 'if line]' missing]
    self.failUnlessEqual(len(units), 4+1)
    # should be parent-first
    # [source gap: 'u0 = units[0]' missing]
    self.failUnlessEqual(u0["path"], [])
    self.failUnlessEqual(u0["type"], "directory")
    self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
    u0crr = u0["check-and-repair-results"]
    self.failUnlessEqual(u0crr["repair-attempted"], False)
    self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)

    ugood = [u for u in units
             if u["type"] == "file" and u["path"] == [u"good"]][0]
    self.failUnlessEqual(ugood["cap"], self.uris["good"])
    ugoodcrr = ugood["check-and-repair-results"]
    self.failUnlessEqual(ugoodcrr["repair-attempted"], False)
    self.failUnlessEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)

    usick = [u for u in units
             if u["type"] == "file" and u["path"] == [u"sick"]][0]
    self.failUnlessEqual(usick["cap"], self.uris["sick"])
    usickcrr = usick["check-and-repair-results"]
    self.failUnlessEqual(usickcrr["repair-attempted"], True)
    self.failUnlessEqual(usickcrr["repair-successful"], True)
    self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
    self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)

    # [source gap: 'stats = units[-1]' and 's = stats["stats"]' missing]
    self.failUnlessEqual(stats["type"], "stats")
    self.failUnlessEqual(s["count-immutable-files"], 2)
    self.failUnlessEqual(s["count-literal-files"], 1)
    self.failUnlessEqual(s["count-directories"], 1)
    d.addCallback(_done)

    d.addErrback(self.explain_web_error)
    # [source gap: trailing 'return d' presumably missing -- confirm]
def _count_leases(self, ignored, which):
    """Return [(share_filename, lease_count)] for every share of
    self.uris[which].

    'ignored' absorbs the previous Deferred result so this can be chained
    directly with d.addCallback(self._count_leases, "name"); the returned
    list is then piped into _assert_leasecount by the next callback.
    """
    u = self.uris[which]
    shares = self.find_shares(u)
    lease_counts = []  # was missing: referenced below before assignment
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
    return lease_counts  # was missing: callers expect the list as the result
3903 def _assert_leasecount(self, lease_counts, expected):
3904 for (fn, num_leases) in lease_counts:
3905 if num_leases != expected:
3906 self.fail("expected %d leases, have %d, on %s" %
3907 (expected, num_leases, fn))
def test_add_lease(self):
    """Check that t=check&add-lease=true adds a new lease only when issued
    by a client with different lease-secrets (clientnum=1); the original
    client merely renews its existing lease. Covers two immutable files
    ("one", "two") and one mutable file.

    NOTE(review): a few lines are missing from this copy (dict/DATA
    initialization, one 'clientnum=1' continuation); flagged inline.
    """
    self.basedir = "web/Grid/add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    # [source gap: self.uris = {} and DATA initialization missing]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "two")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
    d.addCallback(_stash_mutable_uri, "mutable")

    def _compute_fileurls(ignored):
        # [source gap: 'self.fileurls = {}' missing]
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    # baseline: exactly one lease on each object
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "one", "t=check") # no add-lease
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)

    # plain t=check must not change any lease counts
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses the original client, which uses the same
    # lease-secrets, so it will just renew the original lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses an alternate client, which adds a second lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
    # [source gap: 'clientnum=1)' continuation missing]
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
    # [source gap: trailing 'return d' presumably missing -- confirm]
def test_deep_add_lease(self):
    """Check that t=stream-deep-check&add-lease=true adds leases across a
    whole directory tree (root, child file, mutable child), and again only
    when issued by a client with different lease-secrets.

    NOTE(review): missing lines (dict/DATA setup, the 'def _done' header,
    two continuation lines) are flagged inline.
    """
    self.basedir = "web/Grid/deep_add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    # [source gap: self.uris/self.fileurls/DATA initialization missing]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        # [source gap: 'self.rootnode = n' missing -- later callbacks use it]
        self.uris["root"] = n.get_uri()
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"one", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    # [source gap: 'convergence=""' closing line missing]
    d.addCallback(_stash_uri, "small")

    d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
    d.addCallback(_stash_uri, "mutable")

    d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
    # [source gap: 'def _done(res):' header missing around the following]
    units = [simplejson.loads(line)
             for line in res.splitlines()
    # [source gap: 'if line]' missing]
    # root, one, small, mutable, stats
    self.failUnlessEqual(len(units), 4+1)
    d.addCallback(_done)

    # baseline: one lease everywhere
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # same client, same lease-secrets: counts stay at 1
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # alternate client: a second lease appears on every node
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
    # [source gap: 'clientnum=1)' continuation missing]
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
    # [source gap: trailing 'return d' presumably missing -- confirm]
def test_exceptions(self):
    """Check that webapi failures are reported usefully: 410 Gone with a
    text/plain explanation for NoSharesError / NotEnoughSharesError /
    UnrecoverableFileError, 404 for a missing child, and 500 stack traces
    whose format (HTML vs text/plain) follows the Accept header.

    NOTE(review): this copy is missing several lines ('def' headers for
    two callbacks, 'self.GET,' continuations, a 'u = n.get_uri()' in each
    dirnode mangler); each gap is flagged inline.
    """
    self.basedir = "web/Grid/exceptions"
    self.set_up_grid(num_clients=1, num_servers=2)
    c0 = self.g.clients[0]
    # [source gap: self.fileurls = {} and DATA initialization missing]
    d = c0.create_dirnode()
    # [source gap: 'def _stash_root(n):' header missing for the following]
    self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
    self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
    # [source gap: 'return n' missing]
    d.addCallback(_stash_root)
    d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
    # [source gap: 'def _stash_bad(ur):' header missing for the following]
    self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
    self.delete_shares_numbered(ur.uri, range(1,10))

    # flipping one bit of the readkey makes a URI with zero shares
    u = uri.from_string(ur.uri)
    u.key = testutil.flip_bit(u.key, 0)
    baduri = u.to_string()
    self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
    d.addCallback(_stash_bad)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_1share(n):
        # [source gap: 'u = n.get_uri()' missing]
        url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-1share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(1,10))
    d.addCallback(_mangle_dirnode_1share)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_0share(n):
        # [source gap: 'u = n.get_uri()' missing]
        url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-0share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(0,10))
    d.addCallback(_mangle_dirnode_0share)

    # NotEnoughSharesError should be reported sensibly, with a
    # text/plain explanation of the problem, and perhaps some
    # information on which shares *could* be found.

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET unrecoverable",
                                       410, "Gone", "NoSharesError",
                                       self.GET, self.fileurls["0shares"]))
    def _check_zero_shares(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NoSharesError: no shares could be found. "
               "Zero shares usually indicates a corrupt URI, or that "
               "no servers were connected, but it might also indicate "
               "severe corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is: "
               "Failed to get enough shareholders: have 0, need 3")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_zero_shares)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET 1share",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["1share"]))
    def _check_one_share(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NotEnoughSharesError: This indicates that some "
               "servers were unavailable, or that shares have been "
               "lost to server departure, hard drive failure, or disk "
               "corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is:"
               " Failed to get enough shareholders: have 1, need 3")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_one_share)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    def _missing_child(body):
        self.failUnless("No such child: imaginary" in body, body)
    d.addCallback(_missing_child)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
    def _check_0shares_dir_html(body):
        self.failUnless("<html>" in body, body)
        # we should see the regular page, but without the child table or
        # [source gap: rest of this comment line missing]
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        # [source gap: 'body)' continuation missing]
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_0shares_dir_html)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
    def _check_1shares_dir_html(body):
        # at some point, we'll split UnrecoverableFileError into 0-shares
        # and some-shares like we did for immutable files (since there
        # are different sorts of advice to offer in each case). For now,
        # they present the same way.
        self.failUnless("<html>" in body, body)
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        # [source gap: 'body)' continuation missing]
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_1shares_dir_html)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-0share-json",
                                       410, "Gone", "UnrecoverableFileError",
    # [source gap: 'self.GET,' line missing]
                                       self.fileurls["dir-0share-json"]))
    def _check_unrecoverable_file(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-1share-json",
                                       410, "Gone", "UnrecoverableFileError",
    # [source gap: 'self.GET,' line missing]
                                       self.fileurls["dir-1share-json"]))
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))

    # attach a webapi child that throws a random error, to test how it
    # [source gap: rest of this comment line missing]
    w = c0.getServiceNamed("webish")
    w.root.putChild("ERRORBOOM", ErrorBoom())

    # "Accept: */*" : should get a text/html stack trace
    # "Accept: text/plain" : should get a text/plain stack trace
    # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
    # no Accept header: should get a text/html stack trace

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_html",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["*/*"]}))
    def _internal_error_html1(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html1)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["text/plain"]}))
    def _internal_error_text2(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text2)

    CLI_accepts = "text/plain, application/octet-stream"
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": [CLI_accepts]}))
    def _internal_error_text3(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text3)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM"))
    def _internal_error_html4(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html4)

    def _flush_errors(res):
        # Trial: please ignore the CompletelyUnhandledError in the logs
        self.flushLoggedErrors(CompletelyUnhandledError)
        # [source gap: 'return res' presumably missing -- confirm]
    d.addBoth(_flush_errors)
    # [source gap: trailing 'return d' presumably missing -- confirm]
class CompletelyUnhandledError(Exception):
    """Deliberately-unhandled exception raised by ErrorBoom so the tests can
    exercise the webapi's 500 Internal Server Error reporting path.

    The class body was missing in this copy (a bare 'class ...:' line is a
    SyntaxError); restored with a docstring body.
    """
class ErrorBoom(rend.Page):
    """A nevow page that unconditionally blows up during rendering.

    test_exceptions attaches an instance at /ERRORBOOM to verify that the
    web server turns unexpected exceptions into 500 responses.
    """
    def beforeRender(self, ctx):
        # Fail before any HTML is produced, so the server must render an
        # internal-error page instead.
        boom = CompletelyUnhandledError("whoops")
        raise boom