1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish, dirnode
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.dirnode import DirectoryNode
15 from allmydata.nodemaker import NodeMaker
16 from allmydata.unknown import UnknownNode
17 from allmydata.web import status, common
18 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
19 from allmydata.util import fileutil, base32
20 from allmydata.util.consumer import download_to_data
21 from allmydata.util.netstring import split_netstring
22 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
23 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
24 from allmydata.interfaces import IMutableFileNode
25 from allmydata.mutable import servermap, publish, retrieve
26 import common_util as testutil
27 from allmydata.test.no_network import GridTestMixin
28 from allmydata.test.common_web import HTTPClientGETFactory, \
30 from allmydata.client import Client, SecretHolder
32 # create a fake uploader/downloader, and a couple of fake dirnodes, then
33 # create a webserver that works against them
35 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
# Minimal stand-in for the node's stats provider: exposes an empty
# stats/counters structure so the welcome/status pages can render.
# NOTE(review): this listing is gapped — the line between these two
# (presumably a get_stats() method wrapping this dict) is missing from
# the visible excerpt; confirm against the full file.
37 class FakeStatsProvider:
39 stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker stand-in that manufactures fake in-memory filenodes.

    Literal and immutable caps are both served by FakeCHKFileNode;
    mutable caps (and newly created mutable files) are served by
    FakeMutableFileNode.
    """
    def _create_lit(self, cap):
        # literal caps use the same fake node type as CHK caps
        node = FakeCHKFileNode(cap)
        return node
    def _create_immutable(self, cap):
        node = FakeCHKFileNode(cap)
        return node
    def _create_mutable(self, cap):
        fake = FakeMutableFileNode(None, None, None, None)
        return fake.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # keysize is accepted for interface compatibility but unused
        return FakeMutableFileNode(None, None, None, None).create(contents)
# Fake uploader service: reads the whole uploadable into memory, wraps
# it in a fake CHK filenode, and reports the node's URI via
# upload.UploadResults.
53 class FakeUploader(service.Service):
55 def upload(self, uploadable, history=None):
# read the entire uploadable (size first, then one read of that size)
56 d = uploadable.get_size()
57 d.addCallback(lambda size: uploadable.read(size))
# NOTE(review): lines 58-59 are missing from this excerpt; they
# presumably join the read chunks and open the _got_data callback.
60 n = create_chk_filenode(data)
61 results = upload.UploadResults()
62 results.uri = n.get_uri()
# (the line returning `results` from the callback is missing here)
64 d.addCallback(_got_data)
# get_helper_info(): body is missing from the visible excerpt.
66 def get_helper_info(self):
# Interior of a history fake (its `class` line is missing from this
# excerpt). The status lists below are class-level attributes, so they
# are shared by every instance — the accessors return the shared lists.
70 _all_upload_status = [upload.UploadStatus()]
71 _all_download_status = [download.DownloadStatus()]
72 _all_mapupdate_statuses = [servermap.UpdateStatus()]
73 _all_publish_statuses = [publish.PublishStatus()]
74 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
76 def list_all_upload_statuses(self):
77 return self._all_upload_status
78 def list_all_download_statuses(self):
79 return self._all_download_status
80 def list_all_mapupdate_statuses(self):
81 return self._all_mapupdate_statuses
82 def list_all_publish_statuses(self):
83 return self._all_publish_statuses
84 def list_all_retrieve_statuses(self):
85 return self._all_retrieve_statuses
# body of this last accessor is missing from the visible excerpt
86 def list_all_helper_statuses(self):
# A Client subclass wired up entirely with the fakes above: no real
# storage, introducer, or network. Used as the backend for the webish
# server under test.
89 class FakeClient(Client):
# NOTE(review): the `def __init__(self):` line and the tail of this
# comment are missing from the visible excerpt.
91 # don't upcall to Client.__init__, since we only want to initialize a
93 service.MultiService.__init__(self)
94 self.nodeid = "fake_nodeid"
95 self.nickname = "fake_nickname"
96 self.introducer_furl = "None"
97 self.stats_provider = FakeStatsProvider()
98 self._secret_holder = SecretHolder("lease secret", "convergence secret")
100 self.convergence = "some random string"
101 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
102 self.introducer_client = None
103 self.history = FakeHistory()
104 self.uploader = FakeUploader()
105 self.uploader.setServiceParent(self)
# NOTE(review): the remaining FakeNodeMaker constructor arguments
# (original lines 108-109) are missing from this excerpt.
106 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
107 self.uploader, None, None,
# start/stop delegate straight to MultiService, bypassing Client's own
# service startup (which would need real infrastructure)
110 def startService(self):
111 return service.MultiService.startService(self)
112 def stopService(self):
113 return service.MultiService.stopService(self)
115 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
117 class WebMixin(object):
119 self.s = FakeClient()
120 self.s.startService()
121 self.staticdir = self.mktemp()
122 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
123 self.ws.setServiceParent(self.s)
124 self.webish_port = port = self.ws.listener._port.getHost().port
125 self.webish_url = "http://localhost:%d" % port
127 l = [ self.s.create_dirnode() for x in range(6) ]
128 d = defer.DeferredList(l)
130 self.public_root = res[0][1]
131 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
132 self.public_url = "/uri/" + self.public_root.get_uri()
133 self.private_root = res[1][1]
137 self._foo_uri = foo.get_uri()
138 self._foo_readonly_uri = foo.get_readonly_uri()
139 self._foo_verifycap = foo.get_verify_cap().to_string()
140 # NOTE: we ignore the deferred on all set_uri() calls, because we
141 # know the fake nodes do these synchronously
142 self.public_root.set_uri(u"foo", foo.get_uri(),
143 foo.get_readonly_uri())
145 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
146 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
147 self._bar_txt_verifycap = n.get_verify_cap().to_string()
149 foo.set_uri(u"empty", res[3][1].get_uri(),
150 res[3][1].get_readonly_uri())
151 sub_uri = res[4][1].get_uri()
152 self._sub_uri = sub_uri
153 foo.set_uri(u"sub", sub_uri, sub_uri)
154 sub = self.s.create_node_from_uri(sub_uri)
156 _ign, n, blocking_uri = self.makefile(1)
157 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
159 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
160 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
161 # still think of it as an umlaut
162 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
164 _ign, n, baz_file = self.makefile(2)
165 self._baz_file_uri = baz_file
166 sub.set_uri(u"baz.txt", baz_file, baz_file)
168 _ign, n, self._bad_file_uri = self.makefile(3)
169 # this uri should not be downloadable
170 del FakeCHKFileNode.all_contents[self._bad_file_uri]
173 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
174 rodir.get_readonly_uri())
175 rodir.set_uri(u"nor", baz_file, baz_file)
180 # public/foo/blockingfile
183 # public/foo/sub/baz.txt
185 # public/reedownlee/nor
186 self.NEWFILE_CONTENTS = "newfile contents\n"
188 return foo.get_metadata_for(u"bar.txt")
190 def _got_metadata(metadata):
191 self._bar_txt_metadata = metadata
192 d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a fake immutable file whose contents mention *number*.

    Returns a (contents, filenode, cap-uri) tuple.
    """
    contents = "contents of file %s\n" % number
    filenode = create_chk_filenode(contents)
    cap = filenode.get_uri()
    return contents, filenode, cap
201 return self.s.stopService()
def failUnlessIsBarDotTxt(self, res):
    """Assert that *res* is exactly the contents of bar.txt."""
    expected = self.BAR_CONTENTS
    self.failUnlessEqual(res, expected, res)
def failUnlessIsBarJSON(self, res):
    """Assert that *res* is the t=json representation of bar.txt."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], u"filenode")
    info = parsed[1]
    self.failUnless(isinstance(info, dict))
    self.failIf(info["mutable"])
    self.failIf("rw_uri" in info) # immutable files carry no rw_uri
    self.failUnlessEqual(info["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(info["verify_uri"], self._bar_txt_verifycap)
    self.failUnlessEqual(info["size"], len(self.BAR_CONTENTS))
# Assert that `res` is the t=json representation of the mutable
# directory "foo": a ["dirnode", {...}] pair with rw/ro/verify caps and
# the expected children (with metadata).
217 def failUnlessIsFooJSON(self, res):
218 data = simplejson.loads(res)
219 self.failUnless(isinstance(data, list))
220 self.failUnlessEqual(data[0], "dirnode", res)
221 self.failUnless(isinstance(data[1], dict))
222 self.failUnless(data[1]["mutable"])
223 self.failUnless("rw_uri" in data[1]) # mutable
224 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
225 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
226 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
# child names come back as a JSON object; normalize to sorted unicode
228 kidnames = sorted([unicode(n) for n in data[1]["children"]])
229 self.failUnlessEqual(kidnames,
230 [u"bar.txt", u"blockingfile", u"empty",
231 u"n\u00fc.txt", u"sub"])
# NOTE(review): original line 233 (the `for (name, value)` part of this
# dict-building expression) is missing from the visible excerpt.
232 kids = dict( [(unicode(name),value)
234 in data[1]["children"].iteritems()] )
235 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
236 self.failUnless("metadata" in kids[u"sub"][1])
237 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
238 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
239 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
240 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
241 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
242 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
243 self._bar_txt_verifycap)
244 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
245 self._bar_txt_metadata["ctime"])
# NOTE(review): the right-hand side of this final comparison (original
# lines 247-248) is missing from the visible excerpt.
246 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
# Issue an HTTP GET against the test webish server and return a
# Deferred. With return_response=True the Deferred fires with
# (data, statuscode, response_headers) instead of just the body.
# NOTE(review): the continuation of this signature (original line 250,
# presumably `**kwargs):`) is missing from the visible excerpt.
249 def GET(self, urlpath, followRedirect=False, return_response=False,
251 # if return_response=True, this fires with (data, statuscode,
252 # respheaders) instead of just data.
# urlpath must be a byte string; unicode would break URL handling
253 assert not isinstance(urlpath, unicode)
254 url = self.webish_url + urlpath
255 factory = HTTPClientGETFactory(url, method="GET",
256 followRedirect=followRedirect, **kwargs)
257 reactor.connectTCP("localhost", self.webish_port, factory)
# NOTE(review): the `def _got_data(data):` line and the
# return_response conditional (original lines 258-261) are missing
# from the visible excerpt.
260 return (data, factory.status, factory.response_headers)
262 d.addCallback(_got_data)
263 return factory.deferred
# Issue an HTTP HEAD via a custom factory (twisted.web.client does not
# normally expose response headers, hence the "surgery").
265 def HEAD(self, urlpath, return_response=False, **kwargs):
266 # this requires some surgery, because twisted.web.client doesn't want
267 # to give us back the response headers.
# NOTE(review): unlike GET, this passes the bare urlpath (not
# self.webish_url + urlpath) to the factory — possibly the join happens
# on one of the missing lines; confirm against the full file.
268 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
269 reactor.connectTCP("localhost", self.webish_port, factory)
# NOTE(review): the `def _got_data(data):` line and the
# return_response conditional (original lines 270-271, 273) are missing
# from the visible excerpt.
272 return (data, factory.status, factory.response_headers)
274 d.addCallback(_got_data)
275 return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """Issue an HTTP PUT of *data* to the test webserver; returns a Deferred."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="PUT", postdata=data, **kwargs)
def DELETE(self, urlpath):
    """Issue an HTTP DELETE to the test webserver; returns a Deferred."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
# Build a multipart/form-data body from **fields and POST it via POST2.
# A field value may be a (filename, contents) tuple to simulate a file
# upload; unicode values are encoded as UTF-8.
285 def POST(self, urlpath, followRedirect=False, **fields):
# fixed multipart boundary string
286 sepbase = "boogabooga"
# NOTE(review): the lines initializing `sep`, `form`, and `headers`
# (original lines 287-289) are missing from the visible excerpt.
290 form.append('Content-Disposition: form-data; name="_charset"')
# (the _charset value lines, 291-293, are missing here)
294 for name, value in fields.iteritems():
295 if isinstance(value, tuple):
296 filename, value = value
297 form.append('Content-Disposition: form-data; name="%s"; '
298 'filename="%s"' % (name, filename.encode("utf-8")))
# (the `else:` branch line 299 is missing here)
300 form.append('Content-Disposition: form-data; name="%s"' % name)
302 if isinstance(value, unicode):
303 value = value.encode("utf-8")
# after encoding, every value must be a byte string
306 assert isinstance(value, str)
# NOTE(review): the lines appending the value and the closing boundary
# (original lines 307-312) are missing from the visible excerpt.
313 body = "\r\n".join(form) + "\r\n"
314 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
315 return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """POST a pre-encoded *body* to the test webserver; returns a Deferred.

    headers: optional dict of extra request headers. The default is an
    empty dict; using None as the default (instead of a mutable ``{}``)
    avoids the shared-mutable-default-argument pitfall while remaining
    backward compatible for all callers.
    """
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
# Callback/errback helper: `res` is expected to be a Failure wrapping
# `expected_failure`; optionally check substrings of the failure text
# and of the HTTP response body. If `res` is not a Failure, the test
# fails.
322 def shouldFail(self, res, expected_failure, which,
323 substring=None, response_substring=None):
324 if isinstance(res, failure.Failure):
325 res.trap(expected_failure)
# NOTE(review): the `if substring:` guard line (original 326) is
# missing from the visible excerpt.
327 self.failUnless(substring in str(res),
328 "substring '%s' not in '%s'"
329 % (substring, str(res)))
330 if response_substring:
331 self.failUnless(response_substring in res.value.response,
332 "response substring '%s' not in '%s'"
333 % (response_substring, res.value.response))
# NOTE(review): the `else:` line (original 334) is missing from the
# visible excerpt; this fail() runs when res was not a Failure.
335 self.fail("%s was supposed to raise %s, not get '%s'" %
336 (which, expected_failure, res))
# Like shouldFail, but invokes `callable(*args, **kwargs)` itself
# (via maybeDeferred) and attaches the checking callback.
# NOTE(review): the signature continuation carrying
# `response_substring` (original line 339) is missing from the
# visible excerpt.
338 def shouldFail2(self, expected_failure, which, substring,
340 callable, *args, **kwargs):
341 assert substring is None or isinstance(substring, str)
342 assert response_substring is None or isinstance(response_substring, str)
343 d = defer.maybeDeferred(callable, *args, **kwargs)
# NOTE(review): the `def _done(res):` line (original 344) is missing.
345 if isinstance(res, failure.Failure):
346 res.trap(expected_failure)
# (the `if substring:` guard, original 347, is missing here)
348 self.failUnless(substring in str(res),
349 "%s: substring '%s' not in '%s'"
350 % (which, substring, str(res)))
351 if response_substring:
352 self.failUnless(response_substring in res.value.response,
353 "%s: response substring '%s' not in '%s'"
# (the `% (which,` start of this format tuple, original 354, is missing)
355 response_substring, res.value.response))
# (the `else:` line, original 356, is missing; this runs on non-Failure)
357 self.fail("%s was supposed to raise %s, not get '%s'" %
358 (which, expected_failure, res))
# Assert that `res` is a web error.Error Failure with HTTP status
# "404"; anything else fails the test.
362 def should404(self, res, which):
363 if isinstance(res, failure.Failure):
364 res.trap(error.Error)
# status is compared as the string "404" (twisted.web reports it so)
365 self.failUnlessEqual(res.value.status, "404")
# NOTE(review): the `else:` line (original 366) and the tail of this
# fail() call (original 368) are missing from the visible excerpt.
367 self.fail("%s was supposed to Error(404), not get '%s'" %
371 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
372 def test_create(self):
375 def test_welcome(self):
378 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
380 self.s.basedir = 'web/test_welcome'
381 fileutil.make_dirs("web/test_welcome")
382 fileutil.make_dirs("web/test_welcome/private")
384 d.addCallback(_check)
387 def test_provisioning(self):
388 d = self.GET("/provisioning/")
390 self.failUnless('Tahoe Provisioning Tool' in res)
391 fields = {'filled': True,
392 "num_users": int(50e3),
393 "files_per_user": 1000,
394 "space_per_user": int(1e9),
395 "sharing_ratio": 1.0,
396 "encoding_parameters": "3-of-10-5",
398 "ownership_mode": "A",
399 "download_rate": 100,
404 return self.POST("/provisioning/", **fields)
406 d.addCallback(_check)
408 self.failUnless('Tahoe Provisioning Tool' in res)
409 self.failUnless("Share space consumed: 167.01TB" in res)
411 fields = {'filled': True,
412 "num_users": int(50e6),
413 "files_per_user": 1000,
414 "space_per_user": int(5e9),
415 "sharing_ratio": 1.0,
416 "encoding_parameters": "25-of-100-50",
417 "num_servers": 30000,
418 "ownership_mode": "E",
419 "drive_failure_model": "U",
421 "download_rate": 1000,
426 return self.POST("/provisioning/", **fields)
427 d.addCallback(_check2)
429 self.failUnless("Share space consumed: huge!" in res)
430 fields = {'filled': True}
431 return self.POST("/provisioning/", **fields)
432 d.addCallback(_check3)
434 self.failUnless("Share space consumed:" in res)
435 d.addCallback(_check4)
438 def test_reliability_tool(self):
440 from allmydata import reliability
441 _hush_pyflakes = reliability
444 raise unittest.SkipTest("reliability tool requires NumPy")
446 d = self.GET("/reliability/")
448 self.failUnless('Tahoe Reliability Tool' in res)
449 fields = {'drive_lifetime': "8Y",
454 "check_period": "1M",
455 "report_period": "3M",
458 return self.POST("/reliability/", **fields)
460 d.addCallback(_check)
462 self.failUnless('Tahoe Reliability Tool' in res)
463 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
464 self.failUnless(re.search(r, res), res)
465 d.addCallback(_check2)
468 def test_status(self):
469 h = self.s.get_history()
470 dl_num = h.list_all_download_statuses()[0].get_counter()
471 ul_num = h.list_all_upload_statuses()[0].get_counter()
472 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
473 pub_num = h.list_all_publish_statuses()[0].get_counter()
474 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
475 d = self.GET("/status", followRedirect=True)
477 self.failUnless('Upload and Download Status' in res, res)
478 self.failUnless('"down-%d"' % dl_num in res, res)
479 self.failUnless('"up-%d"' % ul_num in res, res)
480 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
481 self.failUnless('"publish-%d"' % pub_num in res, res)
482 self.failUnless('"retrieve-%d"' % ret_num in res, res)
483 d.addCallback(_check)
484 d.addCallback(lambda res: self.GET("/status/?t=json"))
485 def _check_json(res):
486 data = simplejson.loads(res)
487 self.failUnless(isinstance(data, dict))
488 #active = data["active"]
489 # TODO: test more. We need a way to fake an active operation
491 d.addCallback(_check_json)
493 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
495 self.failUnless("File Download Status" in res, res)
496 d.addCallback(_check_dl)
497 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
499 self.failUnless("File Upload Status" in res, res)
500 d.addCallback(_check_ul)
501 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
502 def _check_mapupdate(res):
503 self.failUnless("Mutable File Servermap Update Status" in res, res)
504 d.addCallback(_check_mapupdate)
505 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
506 def _check_publish(res):
507 self.failUnless("Mutable File Publish Status" in res, res)
508 d.addCallback(_check_publish)
509 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
510 def _check_retrieve(res):
511 self.failUnless("Mutable File Retrieve Status" in res, res)
512 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """Check render_time/render_rate formatting on both status mixins.

    The download and upload renderer mixins must format the same inputs
    identically: None renders as "", times scale through s/ms/us, and
    rates scale through MBps/kBps/Bps.
    """
    time_cases = [(None, ""),
                  (2.5, "2.50s"),
                  (0.25, "250ms"),
                  (0.0021, "2.1ms"),
                  (0.000123, "123us")]
    rate_cases = [(None, ""),
                  (2500000, "2.50MBps"),
                  (30100, "30.1kBps"),
                  (123, "123Bps")]
    renderers = (status.DownloadResultsRendererMixin(),
                 status.UploadResultsRendererMixin())
    for renderer in renderers:
        for value, expected in time_cases:
            self.failUnlessEqual(renderer.render_time(None, value), expected)
        for value, expected in rate_cases:
            self.failUnlessEqual(renderer.render_rate(None, value), expected)
539 def test_GET_FILEURL(self):
540 d = self.GET(self.public_url + "/foo/bar.txt")
541 d.addCallback(self.failUnlessIsBarDotTxt)
544 def test_GET_FILEURL_range(self):
545 headers = {"range": "bytes=1-10"}
546 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
547 return_response=True)
548 def _got((res, status, headers)):
549 self.failUnlessEqual(int(status), 206)
550 self.failUnless(headers.has_key("content-range"))
551 self.failUnlessEqual(headers["content-range"][0],
552 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
553 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
557 def test_GET_FILEURL_partial_range(self):
558 headers = {"range": "bytes=5-"}
559 length = len(self.BAR_CONTENTS)
560 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
561 return_response=True)
562 def _got((res, status, headers)):
563 self.failUnlessEqual(int(status), 206)
564 self.failUnless(headers.has_key("content-range"))
565 self.failUnlessEqual(headers["content-range"][0],
566 "bytes 5-%d/%d" % (length-1, length))
567 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
571 def test_HEAD_FILEURL_range(self):
572 headers = {"range": "bytes=1-10"}
573 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
574 return_response=True)
575 def _got((res, status, headers)):
576 self.failUnlessEqual(res, "")
577 self.failUnlessEqual(int(status), 206)
578 self.failUnless(headers.has_key("content-range"))
579 self.failUnlessEqual(headers["content-range"][0],
580 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
584 def test_HEAD_FILEURL_partial_range(self):
585 headers = {"range": "bytes=5-"}
586 length = len(self.BAR_CONTENTS)
587 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
588 return_response=True)
589 def _got((res, status, headers)):
590 self.failUnlessEqual(int(status), 206)
591 self.failUnless(headers.has_key("content-range"))
592 self.failUnlessEqual(headers["content-range"][0],
593 "bytes 5-%d/%d" % (length-1, length))
597 def test_GET_FILEURL_range_bad(self):
598 headers = {"range": "BOGUS=fizbop-quarnak"}
599 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
601 "Syntactically invalid http range header",
602 self.GET, self.public_url + "/foo/bar.txt",
606 def test_HEAD_FILEURL(self):
607 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
608 def _got((res, status, headers)):
609 self.failUnlessEqual(res, "")
610 self.failUnlessEqual(headers["content-length"][0],
611 str(len(self.BAR_CONTENTS)))
612 self.failUnlessEqual(headers["content-type"], ["text/plain"])
616 def test_GET_FILEURL_named(self):
617 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
618 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
619 d = self.GET(base + "/@@name=/blah.txt")
620 d.addCallback(self.failUnlessIsBarDotTxt)
621 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
622 d.addCallback(self.failUnlessIsBarDotTxt)
623 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
624 d.addCallback(self.failUnlessIsBarDotTxt)
625 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
626 d.addCallback(self.failUnlessIsBarDotTxt)
627 save_url = base + "?save=true&filename=blah.txt"
628 d.addCallback(lambda res: self.GET(save_url))
629 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
630 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
631 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
632 u_url = base + "?save=true&filename=" + u_fn_e
633 d.addCallback(lambda res: self.GET(u_url))
634 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
637 def test_PUT_FILEURL_named_bad(self):
638 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
639 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
641 "/file can only be used with GET or HEAD",
642 self.PUT, base + "/@@name=/blah.txt", "")
645 def test_GET_DIRURL_named_bad(self):
646 base = "/file/%s" % urllib.quote(self._foo_uri)
647 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
650 self.GET, base + "/@@name=/blah.txt")
653 def test_GET_slash_file_bad(self):
654 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
656 "/file must be followed by a file-cap and a name",
660 def test_GET_unhandled_URI_named(self):
661 contents, n, newuri = self.makefile(12)
662 verifier_cap = n.get_verify_cap().to_string()
663 base = "/file/%s" % urllib.quote(verifier_cap)
664 # client.create_node_from_uri() can't handle verify-caps
665 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
666 "400 Bad Request", "is not a file-cap",
670 def test_GET_unhandled_URI(self):
671 contents, n, newuri = self.makefile(12)
672 verifier_cap = n.get_verify_cap().to_string()
673 base = "/uri/%s" % urllib.quote(verifier_cap)
674 # client.create_node_from_uri() can't handle verify-caps
675 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
677 "GET unknown URI type: can only do t=info",
681 def test_GET_FILE_URI(self):
682 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
684 d.addCallback(self.failUnlessIsBarDotTxt)
687 def test_GET_FILE_URI_badchild(self):
688 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
689 errmsg = "Files have no children, certainly not named 'boguschild'"
690 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
691 "400 Bad Request", errmsg,
695 def test_PUT_FILE_URI_badchild(self):
696 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
697 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
698 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
699 "400 Bad Request", errmsg,
703 def test_GET_FILEURL_save(self):
704 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
705 # TODO: look at the headers, expect a Content-Disposition: attachment
707 d.addCallback(self.failUnlessIsBarDotTxt)
710 def test_GET_FILEURL_missing(self):
711 d = self.GET(self.public_url + "/foo/missing")
712 d.addBoth(self.should404, "test_GET_FILEURL_missing")
715 def test_PUT_overwrite_only_files(self):
716 # create a directory, put a file in that directory.
717 contents, n, filecap = self.makefile(8)
718 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
719 d.addCallback(lambda res:
720 self.PUT(self.public_url + "/foo/dir/file1.txt",
721 self.NEWFILE_CONTENTS))
722 # try to overwrite the file with replace=only-files
724 d.addCallback(lambda res:
725 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
727 d.addCallback(lambda res:
728 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
729 "There was already a child by that name, and you asked me "
731 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
735 def test_PUT_NEWFILEURL(self):
736 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
737 # TODO: we lose the response code, so we can't check this
738 #self.failUnlessEqual(responsecode, 201)
739 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
740 d.addCallback(lambda res:
741 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
742 self.NEWFILE_CONTENTS))
745 def test_PUT_NEWFILEURL_not_mutable(self):
746 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
747 self.NEWFILE_CONTENTS)
748 # TODO: we lose the response code, so we can't check this
749 #self.failUnlessEqual(responsecode, 201)
750 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
751 d.addCallback(lambda res:
752 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
753 self.NEWFILE_CONTENTS))
756 def test_PUT_NEWFILEURL_range_bad(self):
757 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
758 target = self.public_url + "/foo/new.txt"
759 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
760 "501 Not Implemented",
761 "Content-Range in PUT not yet supported",
762 # (and certainly not for immutable files)
763 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
765 d.addCallback(lambda res:
766 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
769 def test_PUT_NEWFILEURL_mutable(self):
770 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
771 self.NEWFILE_CONTENTS)
772 # TODO: we lose the response code, so we can't check this
773 #self.failUnlessEqual(responsecode, 201)
775 u = uri.from_string_mutable_filenode(res)
776 self.failUnless(u.is_mutable())
777 self.failIf(u.is_readonly())
779 d.addCallback(_check_uri)
780 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
781 d.addCallback(lambda res:
782 self.failUnlessMutableChildContentsAre(self._foo_node,
784 self.NEWFILE_CONTENTS))
787 def test_PUT_NEWFILEURL_mutable_toobig(self):
788 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
789 "413 Request Entity Too Large",
790 "SDMF is limited to one segment, and 10001 > 10000",
792 self.public_url + "/foo/new.txt?mutable=true",
793 "b" * (self.s.MUTABLE_SIZELIMIT+1))
796 def test_PUT_NEWFILEURL_replace(self):
797 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
798 # TODO: we lose the response code, so we can't check this
799 #self.failUnlessEqual(responsecode, 200)
800 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
801 d.addCallback(lambda res:
802 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
803 self.NEWFILE_CONTENTS))
806 def test_PUT_NEWFILEURL_bad_t(self):
807 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
808 "PUT to a file: bad t=bogus",
809 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
813 def test_PUT_NEWFILEURL_no_replace(self):
814 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
815 self.NEWFILE_CONTENTS)
816 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
818 "There was already a child by that name, and you asked me "
822 def test_PUT_NEWFILEURL_mkdirs(self):
823 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
825 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
826 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
827 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
828 d.addCallback(lambda res:
829 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
830 self.NEWFILE_CONTENTS))
833 def test_PUT_NEWFILEURL_blocked(self):
834 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
835 self.NEWFILE_CONTENTS)
836 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
838 "Unable to create directory 'blockingfile': a file was in the way")
841 def test_PUT_NEWFILEURL_emptyname(self):
842 # an empty pathname component (i.e. a double-slash) is disallowed
843 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
845 "The webapi does not allow empty pathname components",
846 self.PUT, self.public_url + "/foo//new.txt", "")
849 def test_DELETE_FILEURL(self):
850 d = self.DELETE(self.public_url + "/foo/bar.txt")
851 d.addCallback(lambda res:
852 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
855 def test_DELETE_FILEURL_missing(self):
856 d = self.DELETE(self.public_url + "/foo/missing")
857 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
860 def test_DELETE_FILEURL_missing2(self):
861 d = self.DELETE(self.public_url + "/missing/missing")
862 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
def failUnlessHasBarDotTxtMetadata(self, res):
    """Assert the JSON for bar.txt carries ctime/mtime metadata.

    *res* is a t=json response body; its metadata's ctime must match
    the value recorded for bar.txt during setUp.
    """
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    # dict.has_key() is deprecated (removed in Python 3); the `in`
    # operator is the exact equivalent
    self.failUnless("metadata" in data[1])
    self.failUnless("ctime" in data[1]["metadata"])
    self.failUnless("mtime" in data[1]["metadata"])
    self.failUnlessEqual(data[1]["metadata"]["ctime"],
                         self._bar_txt_metadata["ctime"])
874 def test_GET_FILEURL_json(self):
875 # twisted.web.http.parse_qs ignores any query args without an '=', so
876 # I can't do "GET /path?json", I have to do "GET /path/t=json"
877 # instead. This may make it tricky to emulate the S3 interface
879 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
881 self.failUnlessIsBarJSON(data)
882 self.failUnlessHasBarDotTxtMetadata(data)
884 d.addCallback(_check1)
887 def test_GET_FILEURL_json_missing(self):
888 d = self.GET(self.public_url + "/foo/missing?json")
889 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
892 def test_GET_FILEURL_uri(self):
893 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
895 self.failUnlessEqual(res, self._bar_txt_uri)
896 d.addCallback(_check)
897 d.addCallback(lambda res:
898 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
900 # for now, for files, uris and readonly-uris are the same
901 self.failUnlessEqual(res, self._bar_txt_uri)
902 d.addCallback(_check2)
905 def test_GET_FILEURL_badtype(self):
906 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
909 self.public_url + "/foo/bar.txt?t=bogus")
912 def test_GET_FILEURL_uri_missing(self):
913 d = self.GET(self.public_url + "/foo/missing?t=uri")
914 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
# Renders a directory listing (following the addSlash redirect) and checks
# the generated HTML: welcome-page link, FILE row for bar.txt, the delete
# form's relative action URL, the DIR row for "sub", a read-only listing,
# a DIR-RO row, and the empty-directory page with its mkdir form.
# NOTE(review): several nested "def _check*(res):" lines and "return d" are
# missing from this extract (original numbering has gaps).
917 def test_GET_DIRURL(self):
918 # the addSlash means we get a redirect here
919 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
921 d = self.GET(self.public_url + "/foo", followRedirect=True)
923 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
925 # the FILE reference points to a URI, but it should end in bar.txt
926 bar_url = ("%s/file/%s/@@named=/bar.txt" %
927 (ROOT, urllib.quote(self._bar_txt_uri)))
928 get_bar = "".join([r'<td>FILE</td>',
930 r'<a href="%s">bar.txt</a>' % bar_url,
932 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
934 self.failUnless(re.search(get_bar, res), res)
935 for line in res.split("\n"):
936 # find the line that contains the delete button for bar.txt
937 if ("form action" in line and
938 'value="delete"' in line and
939 'value="bar.txt"' in line):
940 # the form target should use a relative URL
941 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
942 self.failUnless(('action="%s"' % foo_url) in line, line)
943 # and the when_done= should too
944 #done_url = urllib.quote(???)
945 #self.failUnless(('name="when_done" value="%s"' % done_url)
949 self.fail("unable to find delete-bar.txt line", res)
951 # the DIR reference just points to a URI
952 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
953 get_sub = ((r'<td>DIR</td>')
954 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
955 self.failUnless(re.search(get_sub, res), res)
956 d.addCallback(_check)
958 # look at a readonly directory
959 d.addCallback(lambda res:
960 self.GET(self.public_url + "/reedownlee", followRedirect=True))
962 self.failUnless("(read-only)" in res, res)
963 self.failIf("Upload a file" in res, res)
964 d.addCallback(_check2)
966 # and at a directory that contains a readonly directory
967 d.addCallback(lambda res:
968 self.GET(self.public_url, followRedirect=True))
970 self.failUnless(re.search('<td>DIR-RO</td>'
971 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
972 d.addCallback(_check3)
974 # and an empty directory
975 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
977 self.failUnless("directory is empty" in res, res)
978 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
979 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
980 d.addCallback(_check4)
# An unrecognized t= value on a directory URL must be rejected.
# NOTE(review): the expected-status arguments (line 986-988) are missing here.
984 def test_GET_DIRURL_badtype(self):
985 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
989 self.public_url + "/foo?t=bogus")
# t=json on a directory returns the JSON description checked by
# failUnlessIsFooJSON (helper defined elsewhere in this class).
992 def test_GET_DIRURL_json(self):
993 d = self.GET(self.public_url + "/foo?t=json")
994 d.addCallback(self.failUnlessIsFooJSON)
# t=start-manifest without an ophandle= must fail with the
# "slow operation requires ophandle=" error.
998 def test_POST_DIRURL_manifest_no_ophandle(self):
999 d = self.shouldFail2(error.Error,
1000 "test_POST_DIRURL_manifest_no_ophandle",
1002 "slow operation requires ophandle=",
1003 self.POST, self.public_url, t="start-manifest")
# Starts a t=start-manifest operation under ophandle=125, waits for it to
# finish, and checks the results in HTML, text, and JSON forms; also checks
# that GET /operations/125 matches the t=status output.
# NOTE(review): "return d" lines inside getman and at the end, plus the
# "def _got_json(res):" wrapper, are missing from this extract.
1006 def test_POST_DIRURL_manifest(self):
1007 d = defer.succeed(None)
1008 def getman(ignored, output):
1009 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1010 followRedirect=True)
1011 d.addCallback(self.wait_for_operation, "125")
1012 d.addCallback(self.get_operation_results, "125", output)
1014 d.addCallback(getman, None)
1015 def _got_html(manifest):
1016 self.failUnless("Manifest of SI=" in manifest)
1017 self.failUnless("<td>sub</td>" in manifest)
1018 self.failUnless(self._sub_uri in manifest)
1019 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1020 d.addCallback(_got_html)
1022 # both t=status and unadorned GET should be identical
1023 d.addCallback(lambda res: self.GET("/operations/125"))
1024 d.addCallback(_got_html)
1026 d.addCallback(getman, "html")
1027 d.addCallback(_got_html)
1028 d.addCallback(getman, "text")
1029 def _got_text(manifest):
1030 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1031 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1032 d.addCallback(_got_text)
1033 d.addCallback(getman, "JSON")
1035 data = res["manifest"]
1037 for (path_list, cap) in data:
1038 got[tuple(path_list)] = cap
1039 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1040 self.failUnless((u"sub",u"baz.txt") in got)
1041 self.failUnless("finished" in res)
1042 self.failUnless("origin" in res)
1043 self.failUnless("storage-index" in res)
1044 self.failUnless("verifycaps" in res)
1045 self.failUnless("stats" in res)
1046 d.addCallback(_got_json)
# t=start-deep-size without ophandle= must be rejected as a slow operation.
1049 def test_POST_DIRURL_deepsize_no_ophandle(self):
1050 d = self.shouldFail2(error.Error,
1051 "test_POST_DIRURL_deepsize_no_ophandle",
1053 "slow operation requires ophandle=",
1054 self.POST, self.public_url, t="start-deep-size")
# Runs a t=start-deep-size operation (ophandle=126) and checks both the JSON
# result ("finished", size > 1000) and the text form ("size: NNN" line).
# NOTE(review): the "size = data[...]" line and "def _got_text(res):" wrapper
# are missing from this extract.
1057 def test_POST_DIRURL_deepsize(self):
1058 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1059 followRedirect=True)
1060 d.addCallback(self.wait_for_operation, "126")
1061 d.addCallback(self.get_operation_results, "126", "json")
1062 def _got_json(data):
1063 self.failUnlessEqual(data["finished"], True)
1065 self.failUnless(size > 1000)
1066 d.addCallback(_got_json)
1067 d.addCallback(self.get_operation_results, "126", "text")
1069 mo = re.search(r'^size: (\d+)$', res, re.M)
1070 self.failUnless(mo, res)
1071 size = int(mo.group(1))
1072 # with directories, the size varies.
1073 self.failUnless(size > 1000)
1074 d.addCallback(_got_text)
# t=start-deep-stats without ophandle= must be rejected as a slow operation.
1077 def test_POST_DIRURL_deepstats_no_ophandle(self):
1078 d = self.shouldFail2(error.Error,
1079 "test_POST_DIRURL_deepstats_no_ophandle",
1081 "slow operation requires ophandle=",
1082 self.POST, self.public_url, t="start-deep-stats")
# Runs t=start-deep-stats (ophandle=127) and compares a dict of expected
# counters against the JSON results; size-dependent entries are commented
# out in the expectations because directory serialization size varies.
1085 def test_POST_DIRURL_deepstats(self):
1086 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1087 followRedirect=True)
1088 d.addCallback(self.wait_for_operation, "127")
1089 d.addCallback(self.get_operation_results, "127", "json")
1090 def _got_json(stats):
1091 expected = {"count-immutable-files": 3,
1092 "count-mutable-files": 0,
1093 "count-literal-files": 0,
1095 "count-directories": 3,
1096 "size-immutable-files": 57,
1097 "size-literal-files": 0,
1098 #"size-directories": 1912, # varies
1099 #"largest-directory": 1590,
1100 "largest-directory-children": 5,
1101 "largest-immutable-file": 19,
1103 for k,v in expected.iteritems():
1104 self.failUnlessEqual(stats[k], v,
1105 "stats[%s] was %s, not %s" %
1107 self.failUnlessEqual(stats["size-files-histogram"],
1109 d.addCallback(_got_json)
# t=stream-manifest returns newline-separated JSON units; the test expects
# 7 units ending with a "stats" unit, the first unit describing the root
# directory, and a fully-populated unit for sub/baz.txt.
1112 def test_POST_DIRURL_stream_manifest(self):
1113 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1115 self.failUnless(res.endswith("\n"))
1116 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1117 self.failUnlessEqual(len(units), 7)
1118 self.failUnlessEqual(units[-1]["type"], "stats")
1120 self.failUnlessEqual(first["path"], [])
1121 self.failUnlessEqual(first["cap"], self._foo_uri)
1122 self.failUnlessEqual(first["type"], "directory")
1123 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1124 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1125 self.failIfEqual(baz["storage-index"], None)
1126 self.failIfEqual(baz["verifycap"], None)
1127 self.failIfEqual(baz["repaircap"], None)
1129 d.addCallback(_check)
# t=uri on a directory returns the directory's write cap.
1132 def test_GET_DIRURL_uri(self):
1133 d = self.GET(self.public_url + "/foo?t=uri")
1135 self.failUnlessEqual(res, self._foo_uri)
1136 d.addCallback(_check)
# t=readonly-uri on a directory returns its read-only cap.
1139 def test_GET_DIRURL_readonly_uri(self):
1140 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1142 self.failUnlessEqual(res, self._foo_readonly_uri)
1143 d.addCallback(_check)
# PUT ?t=mkdir creates a new empty directory as a child of /foo.
1146 def test_PUT_NEWDIRURL(self):
1147 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1148 d.addCallback(lambda res:
1149 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1150 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1151 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST ?t=mkdir behaves like the PUT form: creates a new empty directory.
1154 def test_POST_NEWDIRURL(self):
1155 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1156 d.addCallback(lambda res:
1157 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1158 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1159 d.addCallback(self.failUnlessNodeKeysAre, [])
# A double slash (empty path component) in a mkdir URL must be rejected.
1162 def test_POST_NEWDIRURL_emptyname(self):
1163 # an empty pathname component (i.e. a double-slash) is disallowed
1164 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1166 "The webapi does not allow empty pathname components, i.e. a double slash",
1167 self.POST, self.public_url + "//?t=mkdir")
# t=mkdir-with-children: POST a JSON map of initial children and verify each
# child cap (immutable, mutable rw/ro, unknown rw/ro/imm, subdirectory) is
# linked correctly in the new directory.
# NOTE(review): "def _check(uri):" and several continuation argument lines
# (e.g. caps['filecap1']) are missing from this extract.
1170 def test_POST_NEWDIRURL_initial_children(self):
1171 (newkids, caps) = self._create_initial_children()
1172 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1173 simplejson.dumps(newkids))
1175 n = self.s.create_node_from_uri(uri.strip())
1176 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1177 d2.addCallback(lambda ign:
1178 self.failUnlessROChildURIIs(n, u"child-imm",
1180 d2.addCallback(lambda ign:
1181 self.failUnlessRWChildURIIs(n, u"child-mutable",
1183 d2.addCallback(lambda ign:
1184 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1186 d2.addCallback(lambda ign:
1187 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1188 caps['unknown_rocap']))
1189 d2.addCallback(lambda ign:
1190 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1191 caps['unknown_rwcap']))
1192 d2.addCallback(lambda ign:
1193 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1194 caps['unknown_immcap']))
1195 d2.addCallback(lambda ign:
1196 self.failUnlessRWChildURIIs(n, u"dirchild",
1199 d.addCallback(_check)
1200 d.addCallback(lambda res:
1201 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1202 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1203 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1204 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1205 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# t=mkdir-immutable with all-immutable children: the resulting directory
# must hold only read-only child links (immutable file, unknown-imm cap,
# immutable subdirectory).
1208 def test_POST_NEWDIRURL_immutable(self):
1209 (newkids, caps) = self._create_immutable_children()
1210 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1211 simplejson.dumps(newkids))
1213 n = self.s.create_node_from_uri(uri.strip())
1214 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1215 d2.addCallback(lambda ign:
1216 self.failUnlessROChildURIIs(n, u"child-imm",
1218 d2.addCallback(lambda ign:
1219 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1220 caps['unknown_immcap']))
1221 d2.addCallback(lambda ign:
1222 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1225 d.addCallback(_check)
1226 d.addCallback(lambda res:
1227 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1228 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1229 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1230 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1231 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1232 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1233 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1234 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1235 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1236 d.addErrback(self.explain_web_error)
# t=mkdir-immutable with mutable children must fail: the server should
# report "needed to be immutable but was not".
1239 def test_POST_NEWDIRURL_immutable_bad(self):
1240 (newkids, caps) = self._create_initial_children()
1241 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1243 "needed to be immutable but was not",
1245 self.public_url + "/foo/newdir?t=mkdir-immutable",
1246 simplejson.dumps(newkids))
# mkdir on an existing directory is a no-op: "sub" keeps its existing
# contents (baz.txt) rather than being replaced by an empty directory.
1249 def test_PUT_NEWDIRURL_exists(self):
1250 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1251 d.addCallback(lambda res:
1252 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1253 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1254 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir through a path component that is a file (bar.txt) must yield
# 409 Conflict, and the existing tree must be left untouched.
1257 def test_PUT_NEWDIRURL_blocked(self):
1258 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1259 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1261 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1262 d.addCallback(lambda res:
1263 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1264 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1265 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p&path=/sub1/sub2 creates intermediate directories; repeating the
# call must be idempotent and return the same subdirectory URI.
# NOTE(review): the POST that exercises the mkdir-p URL and several "return"
# lines are missing from this extract (line numbers 1275/1279/1281 skipped).
1268 def test_PUT_NEWDIRURL_mkdir_p(self):
1269 d = defer.succeed(None)
1270 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1271 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1272 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1273 def mkdir_p(mkpnode):
1274 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1276 def made_subsub(ssuri):
1277 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1278 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1280 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1282 d.addCallback(made_subsub)
1284 d.addCallback(mkdir_p)
# mkdir with intermediate path components: creates foo/subdir/newdir, so
# "newdir" must NOT appear directly under foo, only under subdir.
1287 def test_PUT_NEWDIRURL_mkdirs(self):
1288 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1289 d.addCallback(lambda res:
1290 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1291 d.addCallback(lambda res:
1292 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1293 d.addCallback(lambda res:
1294 self._foo_node.get_child_at_path(u"subdir/newdir"))
1295 d.addCallback(self.failUnlessNodeKeysAre, [])
# DELETE on a directory URL unlinks it from its parent.
1298 def test_DELETE_DIRURL(self):
1299 d = self.DELETE(self.public_url + "/foo")
1300 d.addCallback(lambda res:
1301 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE on a nonexistent child 404s and leaves the parent intact.
1304 def test_DELETE_DIRURL_missing(self):
1305 d = self.DELETE(self.public_url + "/foo/missing")
1306 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1307 d.addCallback(lambda res:
1308 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE on a nonexistent top-level child of the public root also 404s.
1311 def test_DELETE_DIRURL_missing2(self):
1312 d = self.DELETE(self.public_url + "/missing")
1313 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debug helper: walks the public root with webish.DirnodeWalkerMixin and
# prints each child path (the print line is missing from this extract).
1316 def dump_root(self):
1318 w = webish.DirnodeWalkerMixin()
1319 def visitor(childpath, childnode, metadata):
1321 d = w.walk(self.public_root, visitor)
# Assert that `node` lists exactly `expected_keys` (unicode child names).
# NOTE(review): the "d = node.list()" and "return d" lines are missing from
# this extract (line numbers 1327/1331 skipped).
1324 def failUnlessNodeKeysAre(self, node, expected_keys):
1325 for k in expected_keys:
1326 assert isinstance(k, unicode)
1328 def _check(children):
1329 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1330 d.addCallback(_check)
# Assert that `node` has a child named `name` (unicode).
1332 def failUnlessNodeHasChild(self, node, name):
1333 assert isinstance(name, unicode)
1335 def _check(children):
1336 self.failUnless(name in children)
1337 d.addCallback(_check)
# Assert that `node` has NO child named `name` (unicode).
1339 def failIfNodeHasChild(self, node, name):
1340 assert isinstance(name, unicode)
1342 def _check(children):
1343 self.failIf(name in children)
1344 d.addCallback(_check)
# Download the (immutable) child at `name` and assert its full contents.
1347 def failUnlessChildContentsAre(self, node, name, expected_contents):
1348 assert isinstance(name, unicode)
1349 d = node.get_child_at_path(name)
1350 d.addCallback(lambda node: download_to_data(node))
1351 def _check(contents):
1352 self.failUnlessEqual(contents, expected_contents)
1353 d.addCallback(_check)
# Mutable-file variant: download the best version of the child at `name`
# and assert its contents.
1356 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1357 assert isinstance(name, unicode)
1358 d = node.get_child_at_path(name)
1359 d.addCallback(lambda node: node.download_best_version())
1360 def _check(contents):
1361 self.failUnlessEqual(contents, expected_contents)
1362 d.addCallback(_check)
# Assert that the child at `name` is a read-write (or unknown) link whose
# uri/write-uri match `expected_uri` and whose readonly-uri matches the
# derived read-only form.
1365 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1366 assert isinstance(name, unicode)
1367 d = node.get_child_at_path(name)
1369 self.failUnless(child.is_unknown() or not child.is_readonly())
1370 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1371 self.failUnlessEqual(child.get_write_uri(), expected_uri.strip())
1372 expected_ro_uri = self._make_readonly(expected_uri)
1374 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1375 d.addCallback(_check)
# Assert that the child at `name` is read-only (or unknown): no write uri,
# and both uri and readonly-uri equal `expected_uri`.
1378 def failUnlessROChildURIIs(self, node, name, expected_uri):
1379 assert isinstance(name, unicode)
1380 d = node.get_child_at_path(name)
1382 self.failUnless(child.is_unknown() or child.is_readonly())
1383 self.failUnlessEqual(child.get_write_uri(), None)
1384 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1385 self.failUnlessEqual(child.get_readonly_uri(), expected_uri.strip())
1386 d.addCallback(_check)
# Inverse of failUnlessRWChildURIIs: assert that `got_uri` (e.g. a webapi
# response body) matches the read-write child currently linked at `name`.
1389 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1390 assert isinstance(name, unicode)
1391 d = node.get_child_at_path(name)
1393 self.failUnless(child.is_unknown() or not child.is_readonly())
1394 self.failUnlessEqual(child.get_uri(), got_uri.strip())
1395 self.failUnlessEqual(child.get_write_uri(), got_uri.strip())
1396 expected_ro_uri = self._make_readonly(got_uri)
1398 self.failUnlessEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1399 d.addCallback(_check)
# Assert that `got_uri` matches the read-only child linked at `name`.
1402 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1403 assert isinstance(name, unicode)
1404 d = node.get_child_at_path(name)
1406 self.failUnless(child.is_unknown() or child.is_readonly())
1407 self.failUnlessEqual(child.get_write_uri(), None)
1408 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1409 self.failUnlessEqual(got_uri.strip(), child.get_readonly_uri())
1410 d.addCallback(_check)
# Assert that the fake CHK store holds `contents` under `got_uri`
# (FakeCHKFileNode.all_contents is the in-memory stand-in for the grid).
1413 def failUnlessCHKURIHasContents(self, got_uri, contents):
1414 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
# t=upload with a file field adds new.txt to /foo; the returned URI must
# match the new read-only child and its contents must round-trip.
1416 def test_POST_upload(self):
1417 d = self.POST(self.public_url + "/foo", t="upload",
1418 file=("new.txt", self.NEWFILE_CONTENTS))
1420 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1421 d.addCallback(lambda res:
1422 self.failUnlessChildContentsAre(fn, u"new.txt",
1423 self.NEWFILE_CONTENTS))
# Upload with a non-ASCII filename: the child name must be the unicode
# name, and the file must be retrievable via its UTF-8-encoded URL.
1427 def test_POST_upload_unicode(self):
1428 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1429 d = self.POST(self.public_url + "/foo", t="upload",
1430 file=(filename, self.NEWFILE_CONTENTS))
1431 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1432 d.addCallback(lambda res:
1433 self.failUnlessChildContentsAre(fn, filename,
1434 self.NEWFILE_CONTENTS))
1435 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1436 d.addCallback(lambda res: self.GET(target_url))
1437 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1438 self.NEWFILE_CONTENTS,
# Like test_POST_upload_unicode, but the unicode name comes from a separate
# name= field which overrides the form's file-part filename ("overridden").
1443 def test_POST_upload_unicode_named(self):
1444 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1445 d = self.POST(self.public_url + "/foo", t="upload",
1446 file=("overridden", self.NEWFILE_CONTENTS))
1448 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1449 d.addCallback(lambda res:
1450 self.failUnlessChildContentsAre(fn, filename,
1451 self.NEWFILE_CONTENTS))
1452 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1453 d.addCallback(lambda res: self.GET(target_url))
1454 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1455 self.NEWFILE_CONTENTS,
# POST /uri t=upload (no directory to link into): returns an upload-results
# page; the URI is scraped out of the HTML and checked against the fake
# CHK store.
1459 def test_POST_upload_no_link(self):
1460 d = self.POST("/uri", t="upload",
1461 file=("new.txt", self.NEWFILE_CONTENTS))
1462 def _check_upload_results(page):
1463 # this should be a page which describes the results of the upload
1464 # that just finished.
1465 self.failUnless("Upload Results:" in page)
1466 self.failUnless("URI:" in page)
1467 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1468 mo = uri_re.search(page)
1469 self.failUnless(mo, page)
1470 new_uri = mo.group(1)
1472 d.addCallback(_check_upload_results)
1473 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done=/ on an unlinked upload must redirect the client to "/".
1476 def test_POST_upload_no_link_whendone(self):
1477 d = self.POST("/uri", t="upload", when_done="/",
1478 file=("new.txt", self.NEWFILE_CONTENTS))
1479 d.addBoth(self.shouldRedirect, "/")
# Helper: run `callable`, expect it to fail with error.PageRedirect, and
# hand (statuscode, target-location) to `checker`. A plain (non-redirect)
# result is a test failure.
1482 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1483 d = defer.maybeDeferred(callable, *args, **kwargs)
1485 if isinstance(res, failure.Failure):
1486 res.trap(error.PageRedirect)
1487 statuscode = res.value.status
1488 target = res.value.location
1489 return checker(statuscode, target)
1490 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done=/uri/%(uri)s: the redirect target must interpolate the new
# file's URI; fetching that target must return the uploaded contents.
1495 def test_POST_upload_no_link_whendone_results(self):
1496 def check(statuscode, target):
1497 self.failUnlessEqual(statuscode, str(http.FOUND))
1498 self.failUnless(target.startswith(self.webish_url), target)
1499 return client.getPage(target, method="GET")
1500 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1502 self.POST, "/uri", t="upload",
1503 when_done="/uri/%(uri)s",
1504 file=("new.txt", self.NEWFILE_CONTENTS))
1505 d.addCallback(lambda res:
1506 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
# Unlinked mutable upload: expects an SSK write cap back, verifies the fake
# mutable store holds it, then reads the file back through n.download_best_
# version(), /uri/<cap>, and /file/<cap>.
1509 def test_POST_upload_no_link_mutable(self):
1510 d = self.POST("/uri", t="upload", mutable="true",
1511 file=("new.txt", self.NEWFILE_CONTENTS))
1512 def _check(filecap):
1513 filecap = filecap.strip()
1514 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1515 self.filecap = filecap
1516 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1517 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
1518 n = self.s.create_node_from_uri(filecap)
1519 return n.download_best_version()
1520 d.addCallback(_check)
1522 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1523 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1524 d.addCallback(_check2)
1526 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1527 return self.GET("/file/%s" % urllib.quote(self.filecap))
1528 d.addCallback(_check3)
1530 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1531 d.addCallback(_check4)
# SDMF single-segment limit: an unlinked mutable upload one byte over
# MUTABLE_SIZELIMIT must fail with 413 Request Entity Too Large.
1535 def test_POST_upload_no_link_mutable_toobig(self):
1536 d = self.shouldFail2(error.Error,
1537 "test_POST_upload_no_link_mutable_toobig",
1538 "413 Request Entity Too Large",
1539 "SDMF is limited to one segment, and 10001 > 10000",
1541 "/uri", t="upload", mutable="true",
1543 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# End-to-end mutable-file-in-directory test: create via POST t=upload
# mutable=true, overwrite via POST and then PUT (URI must stay stable),
# check the HTML listing, the directory and file JSON (rw_uri/ro_uri),
# t=uri / t=readonly-uri, direct /uri/<cap> retrieval, HEAD content-length/
# content-type, and finally the 413 overwrite-too-big error path.
# NOTE(review): this extract is line-sampled; nested callback "def" lines
# (_got, _check2/3/4, f=) and some argument lines are missing.
1546 def test_POST_upload_mutable(self):
1547 # this creates a mutable file
1548 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1549 file=("new.txt", self.NEWFILE_CONTENTS))
1551 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1552 d.addCallback(lambda res:
1553 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1554 self.NEWFILE_CONTENTS))
1555 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1557 self.failUnless(IMutableFileNode.providedBy(newnode))
1558 self.failUnless(newnode.is_mutable())
1559 self.failIf(newnode.is_readonly())
1560 self._mutable_node = newnode
1561 self._mutable_uri = newnode.get_uri()
1564 # now upload it again and make sure that the URI doesn't change
1565 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1566 d.addCallback(lambda res:
1567 self.POST(self.public_url + "/foo", t="upload",
1569 file=("new.txt", NEWER_CONTENTS)))
1570 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1571 d.addCallback(lambda res:
1572 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1574 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1576 self.failUnless(IMutableFileNode.providedBy(newnode))
1577 self.failUnless(newnode.is_mutable())
1578 self.failIf(newnode.is_readonly())
1579 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1580 d.addCallback(_got2)
1582 # upload a second time, using PUT instead of POST
1583 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1584 d.addCallback(lambda res:
1585 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1586 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1587 d.addCallback(lambda res:
1588 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1591 # finally list the directory, since mutable files are displayed
1592 # slightly differently
1594 d.addCallback(lambda res:
1595 self.GET(self.public_url + "/foo/",
1596 followRedirect=True))
1597 def _check_page(res):
1598 # TODO: assert more about the contents
1599 self.failUnless("SSK" in res)
1601 d.addCallback(_check_page)
1603 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1605 self.failUnless(IMutableFileNode.providedBy(newnode))
1606 self.failUnless(newnode.is_mutable())
1607 self.failIf(newnode.is_readonly())
1608 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1609 d.addCallback(_got3)
1611 # look at the JSON form of the enclosing directory
1612 d.addCallback(lambda res:
1613 self.GET(self.public_url + "/foo/?t=json",
1614 followRedirect=True))
1615 def _check_page_json(res):
1616 parsed = simplejson.loads(res)
1617 self.failUnlessEqual(parsed[0], "dirnode")
1618 children = dict( [(unicode(name),value)
1620 in parsed[1]["children"].iteritems()] )
1621 self.failUnless("new.txt" in children)
1622 new_json = children["new.txt"]
1623 self.failUnlessEqual(new_json[0], "filenode")
1624 self.failUnless(new_json[1]["mutable"])
1625 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1626 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1627 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1628 d.addCallback(_check_page_json)
1630 # and the JSON form of the file
1631 d.addCallback(lambda res:
1632 self.GET(self.public_url + "/foo/new.txt?t=json"))
1633 def _check_file_json(res):
1634 parsed = simplejson.loads(res)
1635 self.failUnlessEqual(parsed[0], "filenode")
1636 self.failUnless(parsed[1]["mutable"])
1637 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1638 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1639 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1640 d.addCallback(_check_file_json)
1642 # and look at t=uri and t=readonly-uri
1643 d.addCallback(lambda res:
1644 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1645 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1646 d.addCallback(lambda res:
1647 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1648 def _check_ro_uri(res):
1649 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1650 self.failUnlessEqual(res, ro_uri)
1651 d.addCallback(_check_ro_uri)
1653 # make sure we can get to it from /uri/URI
1654 d.addCallback(lambda res:
1655 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1656 d.addCallback(lambda res:
1657 self.failUnlessEqual(res, NEW2_CONTENTS))
1659 # and that HEAD computes the size correctly
1660 d.addCallback(lambda res:
1661 self.HEAD(self.public_url + "/foo/new.txt",
1662 return_response=True))
1663 def _got_headers((res, status, headers)):
1664 self.failUnlessEqual(res, "")
1665 self.failUnlessEqual(headers["content-length"][0],
1666 str(len(NEW2_CONTENTS)))
1667 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1668 d.addCallback(_got_headers)
1670 # make sure that size errors are displayed correctly for overwrite
1671 d.addCallback(lambda res:
1672 self.shouldFail2(error.Error,
1673 "test_POST_upload_mutable-toobig",
1674 "413 Request Entity Too Large",
1675 "SDMF is limited to one segment, and 10001 > 10000",
1677 self.public_url + "/foo", t="upload",
1680 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1683 d.addErrback(self.dump_error)
# Linked mutable upload over MUTABLE_SIZELIMIT must fail with 413.
1685 def test_POST_upload_mutable_toobig(self):
1686 d = self.shouldFail2(error.Error,
1687 "test_POST_upload_mutable_toobig",
1688 "413 Request Entity Too Large",
1689 "SDMF is limited to one segment, and 10001 > 10000",
1691 self.public_url + "/foo",
1692 t="upload", mutable="true",
1694 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# Errback helper: print the HTTP response body hidden inside a
# twisted.web error.Error so trial's failure output is useful.
1697 def dump_error(self, f):
1698 # if the web server returns an error code (like 400 Bad Request),
1699 # web.client.getPage puts the HTTP response body into the .response
1700 # attribute of the exception object that it gives back. It does not
1701 # appear in the Failure's repr(), so the ERROR that trial displays
1702 # will be rather terse and unhelpful. addErrback this method to the
1703 # end of your chain to get more information out of these errors.
1704 if f.check(error.Error):
1705 print "web.error.Error:"
1707 print f.value.response
# Uploading over an existing child (bar.txt) replaces it by default.
1710 def test_POST_upload_replace(self):
1711 d = self.POST(self.public_url + "/foo", t="upload",
1712 file=("bar.txt", self.NEWFILE_CONTENTS))
1714 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1715 d.addCallback(lambda res:
1716 self.failUnlessChildContentsAre(fn, u"bar.txt",
1717 self.NEWFILE_CONTENTS))
# replace=false is fine when the child does not already exist.
1720 def test_POST_upload_no_replace_ok(self):
1721 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1722 file=("new.txt", self.NEWFILE_CONTENTS))
1723 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1724 d.addCallback(lambda res: self.failUnlessEqual(res,
1725 self.NEWFILE_CONTENTS))
# replace=false (query arg) on an existing child must fail and leave the
# original bar.txt contents intact.
1728 def test_POST_upload_no_replace_queryarg(self):
1729 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1730 file=("bar.txt", self.NEWFILE_CONTENTS))
1731 d.addBoth(self.shouldFail, error.Error,
1732 "POST_upload_no_replace_queryarg",
1734 "There was already a child by that name, and you asked me "
1735 "to not replace it")
1736 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1737 d.addCallback(self.failUnlessIsBarDotTxt)
# Same as above but with replace=false passed as a form field.
1740 def test_POST_upload_no_replace_field(self):
1741 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1742 file=("bar.txt", self.NEWFILE_CONTENTS))
1743 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1745 "There was already a child by that name, and you asked me "
1746 "to not replace it")
1747 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1748 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done=/THERE on a linked upload: redirect fires AND the child is
# still created with the right contents.
1751 def test_POST_upload_whendone(self):
1752 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1753 file=("new.txt", self.NEWFILE_CONTENTS))
1754 d.addBoth(self.shouldRedirect, "/THERE")
1756 d.addCallback(lambda res:
1757 self.failUnlessChildContentsAre(fn, u"new.txt",
1758 self.NEWFILE_CONTENTS))
# name= field supplies the child name when file= is a bare body.
1761 def test_POST_upload_named(self):
1763 d = self.POST(self.public_url + "/foo", t="upload",
1764 name="new.txt", file=self.NEWFILE_CONTENTS)
1765 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1766 d.addCallback(lambda res:
1767 self.failUnlessChildContentsAre(fn, u"new.txt",
1768 self.NEWFILE_CONTENTS))
# A name= containing slashes must be rejected, and the directory listing
# must be unchanged afterwards.
# NOTE(review): the tail of the expected-children list (after u"n\u00fc.txt")
# is missing from this extract.
1771 def test_POST_upload_named_badfilename(self):
1772 d = self.POST(self.public_url + "/foo", t="upload",
1773 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1774 d.addBoth(self.shouldFail, error.Error,
1775 "test_POST_upload_named_badfilename",
1777 "name= may not contain a slash",
1779 # make sure that nothing was added
1780 d.addCallback(lambda res:
1781 self.failUnlessNodeKeysAre(self._foo_node,
1782 [u"bar.txt", u"blockingfile",
1783 u"empty", u"n\u00fc.txt",
# t=check on a file: expects a "Healthy" HTML page, a when_done redirect,
# a return_to link back to the file, and a JSON form with storage-index
# and results.healthy.
1787 def test_POST_FILEURL_check(self):
1788 bar_url = self.public_url + "/foo/bar.txt"
1789 d = self.POST(bar_url, t="check")
1791 self.failUnless("Healthy :" in res)
1792 d.addCallback(_check)
1793 redir_url = "http://allmydata.org/TARGET"
1794 def _check2(statuscode, target):
1795 self.failUnlessEqual(statuscode, str(http.FOUND))
1796 self.failUnlessEqual(target, redir_url)
1797 d.addCallback(lambda res:
1798 self.shouldRedirect2("test_POST_FILEURL_check",
1802 when_done=redir_url))
1803 d.addCallback(lambda res:
1804 self.POST(bar_url, t="check", return_to=redir_url))
1806 self.failUnless("Healthy :" in res)
1807 self.failUnless("Return to file" in res)
1808 self.failUnless(redir_url in res)
1809 d.addCallback(_check3)
1811 d.addCallback(lambda res:
1812 self.POST(bar_url, t="check", output="JSON"))
1813 def _check_json(res):
1814 data = simplejson.loads(res)
1815 self.failUnless("storage-index" in data)
1816 self.failUnless(data["results"]["healthy"])
1817 d.addCallback(_check_json)
# t=check&repair=true on a file: same Healthy / redirect / return_to
# checks as test_POST_FILEURL_check, via the repair code path.
1821 def test_POST_FILEURL_check_and_repair(self):
1822 bar_url = self.public_url + "/foo/bar.txt"
1823 d = self.POST(bar_url, t="check", repair="true")
1825 self.failUnless("Healthy :" in res)
1826 d.addCallback(_check)
1827 redir_url = "http://allmydata.org/TARGET"
1828 def _check2(statuscode, target):
1829 self.failUnlessEqual(statuscode, str(http.FOUND))
1830 self.failUnlessEqual(target, redir_url)
1831 d.addCallback(lambda res:
1832 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1835 t="check", repair="true",
1836 when_done=redir_url))
1837 d.addCallback(lambda res:
1838 self.POST(bar_url, t="check", return_to=redir_url))
1840 self.failUnless("Healthy :" in res)
1841 self.failUnless("Return to file" in res)
1842 self.failUnless(redir_url in res)
1843 d.addCallback(_check3)
# t=check on a directory: Healthy page, when_done redirect, return_to
# "file/directory" link, and JSON output with results.healthy.
1846 def test_POST_DIRURL_check(self):
1847 foo_url = self.public_url + "/foo/"
1848 d = self.POST(foo_url, t="check")
1850 self.failUnless("Healthy :" in res, res)
1851 d.addCallback(_check)
1852 redir_url = "http://allmydata.org/TARGET"
1853 def _check2(statuscode, target):
1854 self.failUnlessEqual(statuscode, str(http.FOUND))
1855 self.failUnlessEqual(target, redir_url)
1856 d.addCallback(lambda res:
1857 self.shouldRedirect2("test_POST_DIRURL_check",
1861 when_done=redir_url))
1862 d.addCallback(lambda res:
1863 self.POST(foo_url, t="check", return_to=redir_url))
1865 self.failUnless("Healthy :" in res, res)
1866 self.failUnless("Return to file/directory" in res)
1867 self.failUnless(redir_url in res)
1868 d.addCallback(_check3)
1870 d.addCallback(lambda res:
1871 self.POST(foo_url, t="check", output="JSON"))
1872 def _check_json(res):
1873 data = simplejson.loads(res)
1874 self.failUnless("storage-index" in data)
1875 self.failUnless(data["results"]["healthy"])
1876 d.addCallback(_check_json)
1880 def test_POST_DIRURL_check_and_repair(self):
1881 foo_url = self.public_url + "/foo/"
1882 d = self.POST(foo_url, t="check", repair="true")
1884 self.failUnless("Healthy :" in res, res)
1885 d.addCallback(_check)
1886 redir_url = "http://allmydata.org/TARGET"
1887 def _check2(statuscode, target):
1888 self.failUnlessEqual(statuscode, str(http.FOUND))
1889 self.failUnlessEqual(target, redir_url)
1890 d.addCallback(lambda res:
1891 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1894 t="check", repair="true",
1895 when_done=redir_url))
1896 d.addCallback(lambda res:
1897 self.POST(foo_url, t="check", return_to=redir_url))
1899 self.failUnless("Healthy :" in res)
1900 self.failUnless("Return to file/directory" in res)
1901 self.failUnless(redir_url in res)
1902 d.addCallback(_check3)
1905 def wait_for_operation(self, ignored, ophandle):
1906 url = "/operations/" + ophandle
1907 url += "?t=status&output=JSON"
1910 data = simplejson.loads(res)
1911 if not data["finished"]:
1912 d = self.stall(delay=1.0)
1913 d.addCallback(self.wait_for_operation, ophandle)
1919 def get_operation_results(self, ignored, ophandle, output=None):
1920 url = "/operations/" + ophandle
1923 url += "&output=" + output
1926 if output and output.lower() == "json":
1927 return simplejson.loads(res)
1932 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1933 d = self.shouldFail2(error.Error,
1934 "test_POST_DIRURL_deepcheck_no_ophandle",
1936 "slow operation requires ophandle=",
1937 self.POST, self.public_url, t="start-deep-check")
1940 def test_POST_DIRURL_deepcheck(self):
1941 def _check_redirect(statuscode, target):
1942 self.failUnlessEqual(statuscode, str(http.FOUND))
1943 self.failUnless(target.endswith("/operations/123"))
1944 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1945 self.POST, self.public_url,
1946 t="start-deep-check", ophandle="123")
1947 d.addCallback(self.wait_for_operation, "123")
1948 def _check_json(data):
1949 self.failUnlessEqual(data["finished"], True)
1950 self.failUnlessEqual(data["count-objects-checked"], 8)
1951 self.failUnlessEqual(data["count-objects-healthy"], 8)
1952 d.addCallback(_check_json)
1953 d.addCallback(self.get_operation_results, "123", "html")
1954 def _check_html(res):
1955 self.failUnless("Objects Checked: <span>8</span>" in res)
1956 self.failUnless("Objects Healthy: <span>8</span>" in res)
1957 d.addCallback(_check_html)
1959 d.addCallback(lambda res:
1960 self.GET("/operations/123/"))
1961 d.addCallback(_check_html) # should be the same as without the slash
1963 d.addCallback(lambda res:
1964 self.shouldFail2(error.Error, "one", "404 Not Found",
1965 "No detailed results for SI bogus",
1966 self.GET, "/operations/123/bogus"))
1968 foo_si = self._foo_node.get_storage_index()
1969 foo_si_s = base32.b2a(foo_si)
1970 d.addCallback(lambda res:
1971 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1972 def _check_foo_json(res):
1973 data = simplejson.loads(res)
1974 self.failUnlessEqual(data["storage-index"], foo_si_s)
1975 self.failUnless(data["results"]["healthy"])
1976 d.addCallback(_check_foo_json)
1979 def test_POST_DIRURL_deepcheck_and_repair(self):
1980 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1981 ophandle="124", output="json", followRedirect=True)
1982 d.addCallback(self.wait_for_operation, "124")
1983 def _check_json(data):
1984 self.failUnlessEqual(data["finished"], True)
1985 self.failUnlessEqual(data["count-objects-checked"], 8)
1986 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1987 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1988 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1989 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1990 self.failUnlessEqual(data["count-repairs-successful"], 0)
1991 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1992 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1993 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1994 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1995 d.addCallback(_check_json)
1996 d.addCallback(self.get_operation_results, "124", "html")
1997 def _check_html(res):
1998 self.failUnless("Objects Checked: <span>8</span>" in res)
2000 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2001 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2002 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2004 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2005 self.failUnless("Repairs Successful: <span>0</span>" in res)
2006 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2008 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2009 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2010 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2011 d.addCallback(_check_html)
2014 def test_POST_FILEURL_bad_t(self):
2015 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2016 "POST to file: bad t=bogus",
2017 self.POST, self.public_url + "/foo/bar.txt",
    def test_POST_mkdir(self): # return value?
        # POST t=mkdir with name= creates a new child directory under /foo
        d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        # the freshly-created directory must be empty
        d.addCallback(self.failUnlessNodeKeysAre, [])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt -- confirm against the full file
    def test_POST_mkdir_initial_children(self):
        # t=mkdir-with-children accepts a JSON request body describing the
        # initial children of the new directory
        (newkids, caps) = self._create_initial_children()
        d = self.POST2(self.public_url +
                       "/foo?t=mkdir-with-children&name=newdir",
                       simplejson.dumps(newkids))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        # every requested child must be present
        d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        # spot-check one child's cap
        d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdir_immutable(self):
        # t=mkdir-immutable builds a directory from all-immutable children
        (newkids, caps) = self._create_immutable_children()
        d = self.POST2(self.public_url +
                       "/foo?t=mkdir-immutable&name=newdir",
                       simplejson.dumps(newkids))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
        # each child must appear with the read-only cap we supplied
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2057 def test_POST_mkdir_immutable_bad(self):
2058 (newkids, caps) = self._create_initial_children()
2059 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2061 "needed to be immutable but was not",
2064 "/foo?t=mkdir-immutable&name=newdir",
2065 simplejson.dumps(newkids))
    def test_POST_mkdir_2(self):
        # t=mkdir can also be given in the query string of the child's URL
        d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        # the new directory starts out empty
        d.addCallback(self.failUnlessNodeKeysAre, [])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdirs_2(self):
        # a multi-level path with t=mkdir creates intermediate directories
        # (here 'bardir') along the way
        d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
        d.addCallback(lambda res: self._foo_node.get(u"bardir"))
        d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdir_no_parentdir_noredirect(self):
        # POST /uri?t=mkdir (no parent dir) returns the new writecap in the
        # response body
        d = self.POST("/uri?t=mkdir")
        def _after_mkdir(res):
            # the body must parse as a directory write-cap
            uri.DirectoryURI.init_from_string(res)
        d.addCallback(_after_mkdir)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdir_no_parentdir_noredirect2(self):
        # make sure form-based arguments (as on the welcome page) still work
        d = self.POST("/uri", t="mkdir")
        def _after_mkdir(res):
            # the body must parse as a directory write-cap
            uri.DirectoryURI.init_from_string(res)
        d.addCallback(_after_mkdir)
        d.addErrback(self.explain_web_error)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdir_no_parentdir_redirect(self):
        # with redirect_to_result=true, mkdir answers with a 303 redirect
        # pointing at the new directory's /uri/URI:DIR2:... page
        d = self.POST("/uri?t=mkdir&redirect_to_result=true")
        d.addBoth(self.shouldRedirect, None, statuscode='303')
        def _check_target(target):
            target = urllib.unquote(target)
            self.failUnless(target.startswith("uri/URI:DIR2:"), target)
        d.addCallback(_check_target)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdir_no_parentdir_redirect2(self):
        # same as the previous test, but passing the arguments as form
        # fields instead of query args
        d = self.POST("/uri", t="mkdir", redirect_to_result="true")
        d.addBoth(self.shouldRedirect, None, statuscode='303')
        def _check_target(target):
            target = urllib.unquote(target)
            self.failUnless(target.startswith("uri/URI:DIR2:"), target)
        d.addCallback(_check_target)
        d.addErrback(self.explain_web_error)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2120 def _make_readonly(self, u):
2121 ro_uri = uri.from_string(u).get_readonly()
2124 return ro_uri.to_string()
2126 def _create_initial_children(self):
2127 contents, n, filecap1 = self.makefile(12)
2128 md1 = {"metakey1": "metavalue1"}
2129 filecap2 = make_mutable_file_uri()
2130 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2131 filecap3 = node3.get_readonly_uri()
2132 unknown_rwcap = "lafs://from_the_future"
2133 unknown_rocap = "ro.lafs://readonly_from_the_future"
2134 unknown_immcap = "imm.lafs://immutable_from_the_future"
2135 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2136 dircap = DirectoryNode(node4, None, None).get_uri()
2137 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2138 "ro_uri": self._make_readonly(filecap1),
2139 "metadata": md1, }],
2140 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2141 "ro_uri": self._make_readonly(filecap2)}],
2142 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2143 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2144 "ro_uri": unknown_rocap}],
2145 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2146 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2147 u"dirchild": ["dirnode", {"rw_uri": dircap,
2148 "ro_uri": self._make_readonly(dircap)}],
2150 return newkids, {'filecap1': filecap1,
2151 'filecap2': filecap2,
2152 'filecap3': filecap3,
2153 'unknown_rwcap': unknown_rwcap,
2154 'unknown_rocap': unknown_rocap,
2155 'unknown_immcap': unknown_immcap,
2158 def _create_immutable_children(self):
2159 contents, n, filecap1 = self.makefile(12)
2160 md1 = {"metakey1": "metavalue1"}
2161 tnode = create_chk_filenode("immutable directory contents\n"*10)
2162 dnode = DirectoryNode(tnode, None, None)
2163 assert not dnode.is_mutable()
2164 unknown_immcap = "imm.lafs://immutable_from_the_future"
2165 immdircap = dnode.get_uri()
2166 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2167 "metadata": md1, }],
2168 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2169 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2171 return newkids, {'filecap1': filecap1,
2172 'unknown_immcap': unknown_immcap,
2173 'immdircap': immdircap}
2175 def test_POST_mkdir_no_parentdir_initial_children(self):
2176 (newkids, caps) = self._create_initial_children()
2177 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2178 def _after_mkdir(res):
2179 self.failUnless(res.startswith("URI:DIR"), res)
2180 n = self.s.create_node_from_uri(res)
2181 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2182 d2.addCallback(lambda ign:
2183 self.failUnlessROChildURIIs(n, u"child-imm",
2185 d2.addCallback(lambda ign:
2186 self.failUnlessRWChildURIIs(n, u"child-mutable",
2188 d2.addCallback(lambda ign:
2189 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2191 d2.addCallback(lambda ign:
2192 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2193 caps['unknown_rwcap']))
2194 d2.addCallback(lambda ign:
2195 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2196 caps['unknown_rocap']))
2197 d2.addCallback(lambda ign:
2198 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2199 caps['unknown_immcap']))
2200 d2.addCallback(lambda ign:
2201 self.failUnlessRWChildURIIs(n, u"dirchild",
2204 d.addCallback(_after_mkdir)
2207 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2208 # the regular /uri?t=mkdir operation is specified to ignore its body.
2209 # Only t=mkdir-with-children pays attention to it.
2210 (newkids, caps) = self._create_initial_children()
2211 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2213 "t=mkdir does not accept children=, "
2214 "try t=mkdir-with-children instead",
2215 self.POST2, "/uri?t=mkdir", # without children
2216 simplejson.dumps(newkids))
    def test_POST_noparent_bad(self):
        # an unrecognized t= value on /uri must be rejected with 400
        d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                                 "/uri accepts only PUT, PUT?t=mkdir, "
                                 "POST?t=upload, and POST?t=mkdir",
                                 self.POST, "/uri?t=bogus")
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2226 def test_POST_mkdir_no_parentdir_immutable(self):
2227 (newkids, caps) = self._create_immutable_children()
2228 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2229 def _after_mkdir(res):
2230 self.failUnless(res.startswith("URI:DIR"), res)
2231 n = self.s.create_node_from_uri(res)
2232 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2233 d2.addCallback(lambda ign:
2234 self.failUnlessROChildURIIs(n, u"child-imm",
2236 d2.addCallback(lambda ign:
2237 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2238 caps['unknown_immcap']))
2239 d2.addCallback(lambda ign:
2240 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2243 d.addCallback(_after_mkdir)
2246 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2247 (newkids, caps) = self._create_initial_children()
2248 d = self.shouldFail2(error.Error,
2249 "test_POST_mkdir_no_parentdir_immutable_bad",
2251 "needed to be immutable but was not",
2253 "/uri?t=mkdir-immutable",
2254 simplejson.dumps(newkids))
2257 def test_welcome_page_mkdir_button(self):
2258 # Fetch the welcome page.
2260 def _after_get_welcome_page(res):
2261 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
2262 mo = MKDIR_BUTTON_RE.search(res)
2263 formaction = mo.group(1)
2265 formaname = mo.group(3)
2266 formavalue = mo.group(4)
2267 return (formaction, formt, formaname, formavalue)
2268 d.addCallback(_after_get_welcome_page)
2269 def _after_parse_form(res):
2270 (formaction, formt, formaname, formavalue) = res
2271 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2272 d.addCallback(_after_parse_form)
2273 d.addBoth(self.shouldRedirect, None, statuscode='303')
    def test_POST_mkdir_replace(self): # return value?
        # mkdir over an existing child name ('sub') replaces it by default
        d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        # the replacement directory is empty
        d.addCallback(self.failUnlessNodeKeysAre, [])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2282 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2283 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2284 d.addBoth(self.shouldFail, error.Error,
2285 "POST_mkdir_no_replace_queryarg",
2287 "There was already a child by that name, and you asked me "
2288 "to not replace it")
2289 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2290 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2293 def test_POST_mkdir_no_replace_field(self): # return value?
2294 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2296 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2298 "There was already a child by that name, and you asked me "
2299 "to not replace it")
2300 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2301 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_POST_mkdir_whendone_field(self):
        # a when_done= form field makes mkdir redirect there afterwards
        d = self.POST(self.public_url + "/foo",
                      t="mkdir", name="newdir", when_done="/THERE")
        d.addBoth(self.shouldRedirect, "/THERE")
        # the directory must still have been created
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_mkdir_whendone_queryarg(self):
        # when_done= also works as a query argument
        d = self.POST(self.public_url + "/foo?when_done=/THERE",
                      t="mkdir", name="newdir")
        d.addBoth(self.shouldRedirect, "/THERE")
        # the directory must still have been created
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_bad_t(self):
        # an unrecognized t= value on a directory must be rejected with 400
        d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                             "POST to a directory with bad t=BOGUS",
                             self.POST, self.public_url + "/foo", t="BOGUS")
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2326 def test_POST_set_children(self, command_name="set_children"):
2327 contents9, n9, newuri9 = self.makefile(9)
2328 contents10, n10, newuri10 = self.makefile(10)
2329 contents11, n11, newuri11 = self.makefile(11)
2332 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2335 "ctime": 1002777696.7564139,
2336 "mtime": 1002777696.7564139
2339 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2342 "ctime": 1002777696.7564139,
2343 "mtime": 1002777696.7564139
2346 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2349 "ctime": 1002777696.7564139,
2350 "mtime": 1002777696.7564139
2353 }""" % (newuri9, newuri10, newuri11)
2355 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2357 d = client.getPage(url, method="POST", postdata=reqbody)
2359 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2360 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2361 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2363 d.addCallback(_then)
2364 d.addErrback(self.dump_error)
2367 def test_POST_set_children_with_hyphen(self):
2368 return self.test_POST_set_children(command_name="set-children")
    def test_POST_put_uri(self):
        # t=uri attaches an existing filecap under a new child name
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
        # NOTE(review): this excerpt is truncated here -- the closing
        # 'contents))' argument and the trailing 'return d' appear to be
        # missing; confirm against the full file
2379 def test_POST_put_uri_replace(self):
2380 contents, n, newuri = self.makefile(8)
2381 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2382 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2383 d.addCallback(lambda res:
2384 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2388 def test_POST_put_uri_no_replace_queryarg(self):
2389 contents, n, newuri = self.makefile(8)
2390 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2391 name="bar.txt", uri=newuri)
2392 d.addBoth(self.shouldFail, error.Error,
2393 "POST_put_uri_no_replace_queryarg",
2395 "There was already a child by that name, and you asked me "
2396 "to not replace it")
2397 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2398 d.addCallback(self.failUnlessIsBarDotTxt)
2401 def test_POST_put_uri_no_replace_field(self):
2402 contents, n, newuri = self.makefile(8)
2403 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2404 name="bar.txt", uri=newuri)
2405 d.addBoth(self.shouldFail, error.Error,
2406 "POST_put_uri_no_replace_field",
2408 "There was already a child by that name, and you asked me "
2409 "to not replace it")
2410 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2411 d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_delete(self):
        # t=delete removes the named child from the directory
        d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
        d.addCallback(lambda res: self._foo_node.list())
        def _check(children):
            # bar.txt must be gone from the directory listing
            self.failIf(u"bar.txt" in children)
        d.addCallback(_check)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_rename_file(self):
        # t=rename moves a child to a new name within the same directory
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='wibble.txt')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
        # the renamed child must retain the original contents and metadata
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
        d.addCallback(self.failUnlessIsBarJSON)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_rename_file_redundant(self):
        # renaming a child onto its current name is a harmless no-op
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='bar.txt')
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
        # contents and metadata must be untouched
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
        d.addCallback(self.failUnlessIsBarJSON)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
    def test_POST_rename_file_replace(self):
        # rename a file and replace a directory with it
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='empty')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"empty"))
        # 'empty' must now hold the file's contents, not the old directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsBarJSON)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2460 def test_POST_rename_file_no_replace_queryarg(self):
2461 # rename a file and replace a directory with it
2462 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2463 from_name="bar.txt", to_name='empty')
2464 d.addBoth(self.shouldFail, error.Error,
2465 "POST_rename_file_no_replace_queryarg",
2467 "There was already a child by that name, and you asked me "
2468 "to not replace it")
2469 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2470 d.addCallback(self.failUnlessIsEmptyJSON)
2473 def test_POST_rename_file_no_replace_field(self):
2474 # rename a file and replace a directory with it
2475 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2476 from_name="bar.txt", to_name='empty')
2477 d.addBoth(self.shouldFail, error.Error,
2478 "POST_rename_file_no_replace_field",
2480 "There was already a child by that name, and you asked me "
2481 "to not replace it")
2482 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2483 d.addCallback(self.failUnlessIsEmptyJSON)
2486 def failUnlessIsEmptyJSON(self, res):
2487 data = simplejson.loads(res)
2488 self.failUnlessEqual(data[0], "dirnode", data)
2489 self.failUnlessEqual(len(data[1]["children"]), 0)
2491 def test_POST_rename_file_slash_fail(self):
2492 d = self.POST(self.public_url + "/foo", t="rename",
2493 from_name="bar.txt", to_name='kirk/spock.txt')
2494 d.addBoth(self.shouldFail, error.Error,
2495 "test_POST_rename_file_slash_fail",
2497 "to_name= may not contain a slash",
2499 d.addCallback(lambda res:
2500 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    def test_POST_rename_dir(self):
        # t=rename works on directory children as well as files
        d = self.POST(self.public_url, t="rename",
                      from_name="foo", to_name='plunk')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self.public_root, u"foo"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self.public_root, u"plunk"))
        # the renamed directory must still hold foo's children
        d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
        d.addCallback(self.failUnlessIsFooJSON)
        # NOTE(review): the trailing 'return d' appears to have been dropped
        # from this excerpt
2514 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2515 """ If target is not None then the redirection has to go to target. If
2516 statuscode is not None then the redirection has to be accomplished with
2517 that HTTP status code."""
2518 if not isinstance(res, failure.Failure):
2519 to_where = (target is None) and "somewhere" or ("to " + target)
2520 self.fail("%s: we were expecting to get redirected %s, not get an"
2521 " actual page: %s" % (which, to_where, res))
2522 res.trap(error.PageRedirect)
2523 if statuscode is not None:
2524 self.failUnlessEqual(res.value.status, statuscode,
2525 "%s: not a redirect" % which)
2526 if target is not None:
2527 # the PageRedirect does not seem to capture the uri= query arg
2528 # properly, so we can't check for it.
2529 realtarget = self.webish_url + target
2530 self.failUnlessEqual(res.value.location, realtarget,
2531 "%s: wrong target" % which)
2532 return res.value.location
2534 def test_GET_URI_form(self):
2535 base = "/uri?uri=%s" % self._bar_txt_uri
2536 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2537 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2539 d.addBoth(self.shouldRedirect, targetbase)
2540 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2541 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2542 d.addCallback(lambda res: self.GET(base+"&t=json"))
2543 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2544 d.addCallback(self.log, "about to get file by uri")
2545 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2546 d.addCallback(self.failUnlessIsBarDotTxt)
2547 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2548 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2549 followRedirect=True))
2550 d.addCallback(self.failUnlessIsFooJSON)
2551 d.addCallback(self.log, "got dir by uri")
    def test_GET_URI_form_bad(self):
        # GET /uri without a uri= query argument must be a 400
        d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                             "400 Bad Request", "GET /uri requires uri=",
        # NOTE(review): this excerpt is truncated here -- the callable
        # argument (presumably self.GET, "/uri") and the trailing 'return d'
        # appear to be missing; confirm against the full file
2561 def test_GET_rename_form(self):
2562 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2563 followRedirect=True)
2565 self.failUnless('name="when_done" value="."' in res, res)
2566 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2567 d.addCallback(_check)
2570 def log(self, res, msg):
2571 #print "MSG: %s RES: %s" % (msg, res)
    def test_GET_URI_URL(self):
        # /uri/$FILECAP serves the file contents; extra filename= and
        # save= query args must not change the body
        base = "/uri/%s" % self._bar_txt_uri
        # NOTE(review): the initial 'd = self.GET(base)' line appears to be
        # missing from this excerpt -- confirm against the full file
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): the trailing 'return d' also appears to be missing
    def test_GET_URI_URL_dir(self):
        # /uri/$DIRCAP?t=json returns the directory's JSON representation
        base = "/uri/%s?t=json" % self._foo_uri
        # NOTE(review): the 'd = self.GET(base)' line (and trailing
        # 'return d') appear to be missing from this excerpt
        d.addCallback(self.failUnlessIsFooJSON)
2591 def test_GET_URI_URL_missing(self):
2592 base = "/uri/%s" % self._bad_file_uri
2593 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2594 http.GONE, None, "NotEnoughSharesError",
2596 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2597 # here? we must arrange for a download to fail after target.open()
2598 # has been called, and then inspect the response to see that it is
2599 # shorter than we expected.
2602 def test_PUT_DIRURL_uri(self):
2603 d = self.s.create_dirnode()
2605 new_uri = dn.get_uri()
2606 # replace /foo with a new (empty) directory
2607 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2608 d.addCallback(lambda res:
2609 self.failUnlessEqual(res.strip(), new_uri))
2610 d.addCallback(lambda res:
2611 self.failUnlessRWChildURIIs(self.public_root,
2615 d.addCallback(_made_dir)
2618 def test_PUT_DIRURL_uri_noreplace(self):
2619 d = self.s.create_dirnode()
2621 new_uri = dn.get_uri()
2622 # replace /foo with a new (empty) directory, but ask that
2623 # replace=false, so it should fail
2624 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2625 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2627 self.public_url + "/foo?t=uri&replace=false",
2629 d.addCallback(lambda res:
2630 self.failUnlessRWChildURIIs(self.public_root,
2634 d.addCallback(_made_dir)
2637 def test_PUT_DIRURL_bad_t(self):
2638 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2639 "400 Bad Request", "PUT to a directory",
2640 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2641 d.addCallback(lambda res:
2642 self.failUnlessRWChildURIIs(self.public_root,
2647 def test_PUT_NEWFILEURL_uri(self):
2648 contents, n, new_uri = self.makefile(8)
2649 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2650 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2651 d.addCallback(lambda res:
2652 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2656 def test_PUT_NEWFILEURL_uri_replace(self):
2657 contents, n, new_uri = self.makefile(8)
2658 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2659 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2660 d.addCallback(lambda res:
2661 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2665 def test_PUT_NEWFILEURL_uri_no_replace(self):
2666 contents, n, new_uri = self.makefile(8)
2667 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2668 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2670 "There was already a child by that name, and you asked me "
2671 "to not replace it")
2674 def test_PUT_NEWFILE_URI(self):
2675 file_contents = "New file contents here\n"
2676 d = self.PUT("/uri", file_contents)
2678 assert isinstance(uri, str), uri
2679 self.failUnless(uri in FakeCHKFileNode.all_contents)
2680 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2682 return self.GET("/uri/%s" % uri)
2683 d.addCallback(_check)
2685 self.failUnlessEqual(res, file_contents)
2686 d.addCallback(_check2)
2689 def test_PUT_NEWFILE_URI_not_mutable(self):
2690 file_contents = "New file contents here\n"
2691 d = self.PUT("/uri?mutable=false", file_contents)
2693 assert isinstance(uri, str), uri
2694 self.failUnless(uri in FakeCHKFileNode.all_contents)
2695 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2697 return self.GET("/uri/%s" % uri)
2698 d.addCallback(_check)
2700 self.failUnlessEqual(res, file_contents)
2701 d.addCallback(_check2)
2704 def test_PUT_NEWFILE_URI_only_PUT(self):
2705 d = self.PUT("/uri?t=bogus", "")
2706 d.addBoth(self.shouldFail, error.Error,
2707 "PUT_NEWFILE_URI_only_PUT",
2709 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2712 def test_PUT_NEWFILE_URI_mutable(self):
2713 file_contents = "New file contents here\n"
2714 d = self.PUT("/uri?mutable=true", file_contents)
2715 def _check1(filecap):
2716 filecap = filecap.strip()
2717 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2718 self.filecap = filecap
2719 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2720 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2721 n = self.s.create_node_from_uri(filecap)
2722 return n.download_best_version()
2723 d.addCallback(_check1)
2725 self.failUnlessEqual(data, file_contents)
2726 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2727 d.addCallback(_check2)
2729 self.failUnlessEqual(res, file_contents)
2730 d.addCallback(_check3)
2733 def test_PUT_mkdir(self):
2734 d = self.PUT("/uri?t=mkdir", "")
2736 n = self.s.create_node_from_uri(uri.strip())
2737 d2 = self.failUnlessNodeKeysAre(n, [])
2738 d2.addCallback(lambda res:
2739 self.GET("/uri/%s?t=json" % uri))
2741 d.addCallback(_check)
2742 d.addCallback(self.failUnlessIsEmptyJSON)
def test_POST_check(self):
    """POST t=check on a file in a directory must succeed (result body is
    not inspected here)."""
    d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
    def _done(res):
        # this returns a string form of the results, which are probably
        # None since we're using fake filenodes.
        # TODO: verify that the check actually happened, by changing
        # FakeCHKFileNode to count how many times .check() has been
        # called.
        pass
    d.addCallback(_done)
    return d
def test_bad_method(self):
    """An unsupported HTTP method on a file URL yields 501 Not Implemented."""
    url = self.webish_url + self.public_url + "/foo/bar.txt"
    d = self.shouldHTTPError("test_bad_method",
                             501, "Not Implemented",
                             "I don't know how to treat a BOGUS request.",
                             client.getPage, url, method="BOGUS")
    return d
def test_short_url(self):
    """DELETE on the bare /uri URL (no cap) yields 501 Not Implemented."""
    url = self.webish_url + "/uri"
    d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                             "I don't know how to treat a DELETE request.",
                             client.getPage, url, method="DELETE")
    return d
def test_ophandle_bad(self):
    """Asking for the status of an unknown operation handle yields 404."""
    url = self.webish_url + "/operations/bogus?t=status"
    d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                             "unknown/expired handle 'bogus'",
                             client.getPage, url)
    return d
def test_ophandle_cancel(self):
    """t=cancel on an operation handle cancels the monitor and forgets the
    handle, so a later status query returns 404."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/128?t=status&output=JSON"))
    def _check1(res):
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
        monitor = self.ws.root.child_operations.handles["128"][0]
        d = self.POST("/operations/128?t=cancel&output=JSON")
        def _check2(res):
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            # t=cancel causes the handle to be forgotten
            self.failUnless(monitor.is_cancelled())
        d.addCallback(_check2)
        return d
    d.addCallback(_check1)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_cancel",
                                       404, "404 Not Found",
                                       "unknown/expired handle '128'",
                                       self.GET,
                                       "/operations/128?t=status&output=JSON"))
    return d
def test_ophandle_retainfor(self):
    """retain-for=0 on a status query causes the operation handle to expire
    shortly afterwards, so a later query returns 404."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
    def _check1(res):
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # the retain-for=0 will cause the handle to be expired very soon
    d.addCallback(self.stall, 2.0)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_retainfor",
                                       404, "404 Not Found",
                                       "unknown/expired handle '129'",
                                       self.GET,
                                       "/operations/129?t=status&output=JSON"))
    return d
def test_ophandle_release_after_complete(self):
    """release-after-complete=true expires the handle once the operation
    finishes, so a later status query returns 404."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "130")
    d.addCallback(lambda ignored:
                  self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
    # the release-after-complete=true will cause the handle to be expired
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_release_after_complete",
                                       404, "404 Not Found",
                                       "unknown/expired handle '130'",
                                       self.GET,
                                       "/operations/130?t=status&output=JSON"))
    return d
def test_incident(self):
    """POST /report_incident acknowledges the report with a thank-you page."""
    d = self.POST("/report_incident", details="eek")
    def _done(res):
        self.failUnless("Thank you for your report!" in res, res)
    d.addCallback(_done)
    return d
def test_static(self):
    """Files placed under the static directory are served at /static/..."""
    webdir = os.path.join(self.staticdir, "subdir")
    fileutil.make_dirs(webdir)
    f = open(os.path.join(webdir, "hello.txt"), "wb")
    f.write("hello")
    f.close()

    d = self.GET("/static/subdir/hello.txt")
    def _check(res):
        self.failUnlessEqual(res, "hello")
    d.addCallback(_check)
    return d
class Util(unittest.TestCase, ShouldFailMixin):
    """Unit tests for the small formatting/parsing helpers in
    allmydata.web.common and allmydata.web.status."""

    def test_parse_replace_arg(self):
        # accepts "true"/"false" booleans and the literal "only-files";
        # anything else is rejected with an AssertionError
        self.failUnlessEqual(common.parse_replace_arg("true"), True)
        self.failUnlessEqual(common.parse_replace_arg("false"), False)
        self.failUnlessEqual(common.parse_replace_arg("only-files"),
                             "only-files")
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        def convert(s):
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
        def convert2(s):
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2902 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    # Convenience: POST to the stashed fileurl for `which`, with `args`
    # as the query string. `ign` lets this sit directly in a Deferred
    # callback chain (the previous result is discarded).
    base = self.fileurls[which]
    return self.GET("%s?%s" % (base, args), method="POST",
                    clientnum=clientnum)
def test_filecheck(self):
    """t=check (HTML and JSON) against files in several states: healthy,
    literal, immutable-dir, missing one share, mostly-deleted, and a
    corrupted mutable file checked with verify=true."""
    self.basedir = "web/Grid/filecheck"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    DATA = "data" * 100
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data("literal", convergence="")))
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
    d.addCallback(_stash_mutable_uri, "smalldir")

    def _compute_fileurls(ignored):
        self.fileurls = {}
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        # "sick": delete one share; "dead": delete 9 of 10 shares;
        # "corrupt": flip bits in one share with the debug tool
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        corrupt_share(cso)
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)
    d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
    def _got_html_good_return_to(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
                        in res, res)
    d.addCallback(_got_html_good_return_to)
    d.addCallback(self.CHECK, "good", "t=check&output=json")
    def _got_json_good(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"], "Healthy")
        self.failUnless(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_good)

    d.addCallback(self.CHECK, "small", "t=check")
    def _got_html_small(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_small)
    d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
    def _got_html_small_return_to(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
                        in res, res)
    d.addCallback(_got_html_small_return_to)
    d.addCallback(self.CHECK, "small", "t=check&output=json")
    def _got_json_small(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_small)

    d.addCallback(self.CHECK, "smalldir", "t=check")
    def _got_html_smalldir(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_smalldir)
    d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
    def _got_json_smalldir(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_smalldir)

    d.addCallback(self.CHECK, "sick", "t=check")
    def _got_html_sick(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_sick)
    d.addCallback(self.CHECK, "sick", "t=check&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_sick)

    d.addCallback(self.CHECK, "dead", "t=check")
    def _got_html_dead(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_dead)
    d.addCallback(self.CHECK, "dead", "t=check&output=json")
    def _got_json_dead(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 1 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failIf(r["results"]["recoverable"])
    d.addCallback(_got_json_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
    def _got_html_corrupt(res):
        self.failUnless("Not Healthy! : Unhealthy" in res, res)
    d.addCallback(_got_html_corrupt)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
    def _got_json_corrupt(res):
        r = simplejson.loads(res)
        self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
                        r["summary"])
        self.failIf(r["results"]["healthy"])
        self.failUnless(r["results"]["recoverable"])
        self.failUnlessEqual(r["results"]["count-shares-good"], 9)
        self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
    d.addCallback(_got_json_corrupt)

    d.addErrback(self.explain_web_error)
    return d
def test_repair_html(self):
    """t=check&repair=true via HTML: a healthy file needs no repair, a
    file missing one share is repaired, and a corrupted mutable file is
    repaired when checked with verify=true."""
    self.basedir = "web/Grid/repair_html"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    DATA = "data" * 100
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")

    def _compute_fileurls(ignored):
        self.fileurls = {}
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        corrupt_share(cso)
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check&repair=true")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("No repair necessary" in res, res)
    d.addCallback(_got_html_good)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true")
    def _got_html_sick(res):
        self.failUnless("Healthy : healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_sick)

    # repair of a dead file will fail, of course, but it isn't yet
    # clear how this should be reported. Right now it shows up as
    # an error (the dead-file case below is disabled until that is
    # settled).
    #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
    #def _got_html_dead(res):
    #    print res
    #    self.failUnless("Healthy : healthy" in res, res)
    #    self.failIf("Not Healthy" in res, res)
    #    self.failUnless("No repair necessary" in res, res)
    #d.addCallback(_got_html_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
    def _got_html_corrupt(res):
        self.failUnless("Healthy : Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_corrupt)

    d.addErrback(self.explain_web_error)
    return d
def test_repair_json(self):
    """t=check&repair=true&output=json on a file missing one share:
    repair is attempted, succeeds, and pre/post-repair summaries differ."""
    self.basedir = "web/Grid/repair_json"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    DATA = "data" * 100
    d = c0.upload(upload.Data(DATA+"1", convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "sick")

    def _compute_fileurls(ignored):
        self.fileurls = {}
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["repair-attempted"], True)
        self.failUnlessEqual(r["repair-successful"], True)
        self.failUnlessEqual(r["pre-repair-results"]["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["pre-repair-results"]["results"]["healthy"])
        self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
        self.failUnless(r["post-repair-results"]["results"]["healthy"])
    d.addCallback(_got_json_sick)

    d.addErrback(self.explain_web_error)
    return d
def test_unknown(self, immutable=False):
    """Directories containing unknown (future-cap) children must still
    render as HTML, JSON, and t=info, both read-write and read-only."""
    self.basedir = "web/Grid/unknown"
    if immutable:
        self.basedir = "web/Grid/unknown-immutable"

    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    self.fileurls = {}

    future_write_uri = "x-tahoe-crazy://I_am_from_the_future."
    future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future."
    # the future cap format may contain slashes, which must be tolerated
    expected_info_url = "uri/%s?t=info" % urllib.quote(future_write_uri,
                                                       safe="")

    if immutable:
        name = u"future-imm"
        future_node = UnknownNode(None, future_read_uri, deep_immutable=True)
        d = c0.create_immutable_dirnode({name: (future_node, {})})
    else:
        name = u"future"
        future_node = UnknownNode(future_write_uri, future_read_uri)
        d = c0.create_dirnode()

    def _stash_root_and_create_file(n):
        self.rootnode = n
        self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
        if not immutable:
            return self.rootnode.set_node(name, future_node)
    d.addCallback(_stash_root_and_create_file)

    # make sure directory listing tolerates unknown nodes
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_directory_html(res):
        self.failUnlessIn("<td>%s</td>" % (str(name),), res)
        # find the More Info link for name, should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnlessEqual(info_url, "%s?t=info" % (str(name),))
    d.addCallback(_check_directory_html)

    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_directory_json(res, expect_rw_uri):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        f = data[1]["children"][name]
        self.failUnlessEqual(f[0], "unknown")
        if expect_rw_uri:
            self.failUnlessEqual(f[1]["rw_uri"], future_write_uri)
        else:
            self.failIfIn("rw_uri", f[1])
        self.failUnlessEqual(f[1]["ro_uri"],
                             ("imm." if immutable else "ro.") + future_read_uri)
        self.failUnless("metadata" in f[1])
    d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

    def _check_info(res, expect_rw_uri, expect_ro_uri):
        self.failUnlessIn("Object Type: <span>unknown</span>", res)
        if expect_rw_uri:
            self.failUnlessIn(future_write_uri, res)
        if expect_ro_uri:
            self.failUnlessIn(future_read_uri, res)
        else:
            self.failIfIn(future_read_uri, res)
        self.failIfIn("Raw data as", res)
        self.failIfIn("Directory writecap", res)
        self.failIfIn("Checker Operations", res)
        self.failIfIn("Mutable File Operations", res)
        self.failIfIn("Directory Operations", res)

    # FIXME: these should have expect_rw_uri=not immutable; I don't know
    # why they fail. Possibly related to ticket #922.

    d.addCallback(lambda ign: self.GET(expected_info_url))
    d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
    d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
    d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)

    def _check_json(res, expect_rw_uri):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "unknown")
        if expect_rw_uri:
            self.failUnlessEqual(data[1]["rw_uri"], future_write_uri)
        else:
            self.failIfIn("rw_uri", data[1])
        self.failUnlessEqual(data[1]["ro_uri"],
                             ("imm." if immutable else "ro.") + future_read_uri)
        # TODO: check metadata contents
        self.failUnless("metadata" in data[1])

    d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
    d.addCallback(_check_json, expect_rw_uri=not immutable)

    # and make sure that a read-only version of the directory can be
    # rendered too. This version will not have future_write_uri, whether
    # or not future_node was immutable.
    d.addCallback(lambda ign: self.GET(self.rourl))
    d.addCallback(_check_directory_html)
    d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
    d.addCallback(_check_directory_json, expect_rw_uri=False)

    d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
    d.addCallback(_check_json, expect_rw_uri=False)

    # TODO: check that getting t=info from the Info link in the ro directory
    # works, and does not include the writecap URI.
    return d
def test_immutable_unknown(self):
    # Run the unknown-cap test suite against a deep-immutable directory.
    run = self.test_unknown
    return run(immutable=True)
def test_mutant_dirnodes_are_omitted(self):
    """Mutant children (mutable caps smuggled into an immutable directory)
    must be silently dropped by directory listing, HTML, and JSON, while
    the valid LIT sibling survives."""
    self.basedir = "web/Grid/mutant_dirnodes_are_omitted"

    self.set_up_grid()
    c = self.g.clients[0]
    nm = c.nodemaker
    self.uris = {}
    self.fileurls = {}

    lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
    mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
    mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"

    # This method tests mainly dirnode, but we'd have to duplicate code in order to
    # test the dirnode and web layers separately.
    #
    # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
    # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
    # When the directory is read, the mutants should be silently disposed of, leaving
    # their lonely sibling.
    # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
    # because immutable directories don't have a writecap and therefore that field
    # isn't (and can't be) decrypted.
    # TODO: The field still exists in the netstring. Technically we should check what
    # happens if something is put there (it should be ignored), but that can wait.

    lonely_child = nm.create_from_cap(lonely_uri)
    mutant_ro_child = nm.create_from_cap(mut_read_uri)
    mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)

    def _by_hook_or_by_crook():
        return True
    for n in [mutant_ro_child, mutant_write_in_ro_child]:
        n.is_allowed_in_immutable_directory = _by_hook_or_by_crook

    mutant_write_in_ro_child.get_write_uri = lambda: None
    mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri

    kids = {u"lonely": (lonely_child, {}),
            u"ro": (mutant_ro_child, {}),
            u"write-in-ro": (mutant_write_in_ro_child, {}),
            }
    d = c.create_immutable_dirnode(kids)

    def _created(dn):
        self.failUnless(isinstance(dn, dirnode.DirectoryNode))
        self.failIf(dn.is_mutable())
        self.failUnless(dn.is_readonly())
        # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
        self.failIf(hasattr(dn._node, 'get_writekey'))
        rep = str(dn)
        self.failUnless("RO-IMM" in rep)
        cap = dn.get_cap()
        self.failUnlessIn("CHK", cap.to_string())
        self.cap = cap
        self.rootnode = dn
        self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
        return download_to_data(dn._node)
    d.addCallback(_created)

    def _check_data(data):
        # Decode the netstring representation of the directory to check that all children
        # are present. This is a bit of an abstraction violation, but there's not really
        # any other way to do it given that the real DirectoryNode._unpack_contents would
        # strip the mutant children out (which is what we're trying to test, later).
        position = 0
        numkids = 0
        while position < len(data):
            entries, position = split_netstring(data, 1, position)
            entry = entries[0]
            (name, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
            name = name.decode("utf-8")
            self.failUnless(rwcapdata == "")
            ro_uri = ro_uri.strip()
            if name in kids:
                self.failIfEqual(ro_uri, "")
                (expected_child, ign) = kids[name]
                self.failUnlessEqual(ro_uri, expected_child.get_readonly_uri())
                numkids += 1

        self.failUnlessEqual(numkids, 3)
        return self.rootnode.list()
    d.addCallback(_check_data)

    # Now when we use the real directory listing code, the mutants should be absent.
    def _check_kids(children):
        self.failUnlessEqual(sorted(children.keys()), [u"lonely"])
        lonely_node, lonely_metadata = children[u"lonely"]

        self.failUnlessEqual(lonely_node.get_write_uri(), None)
        self.failUnlessEqual(lonely_node.get_readonly_uri(), lonely_uri)
    d.addCallback(_check_kids)

    d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
    d.addCallback(lambda n: n.list())
    d.addCallback(_check_kids) # again with dirnode recreated from cap

    # Make sure the lonely child can be listed in HTML...
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_html(res):
        self.failIfIn("URI:SSK", res)
        get_lonely = "".join([r'<td>FILE</td>',
                              r'\s+<td>',
                              r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
                              r'</td>',
                              r'\s+<td>%d</td>' % len("one"),
                              ])
        self.failUnless(re.search(get_lonely, res), res)

        # find the More Info link for name, should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
    d.addCallback(_check_html)

    # ... and in JSON.
    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        listed_children = data[1]["children"]
        self.failUnlessEqual(sorted(listed_children.keys()), [u"lonely"])
        ll_type, ll_data = listed_children[u"lonely"]
        self.failUnlessEqual(ll_type, "filenode")
        self.failIf("rw_uri" in ll_data)
        self.failUnlessEqual(ll_data["ro_uri"], lonely_uri)
    d.addCallback(_check_json)
    return d
3413 def test_deep_check(self):
3414 self.basedir = "web/Grid/deep_check"
3416 c0 = self.g.clients[0]
3420 d = c0.create_dirnode()
3421 def _stash_root_and_create_file(n):
3423 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3424 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3425 d.addCallback(_stash_root_and_create_file)
3426 def _stash_uri(fn, which):
3427 self.uris[which] = fn.get_uri()
3429 d.addCallback(_stash_uri, "good")
3430 d.addCallback(lambda ign:
3431 self.rootnode.add_file(u"small",
3432 upload.Data("literal",
3434 d.addCallback(_stash_uri, "small")
3435 d.addCallback(lambda ign:
3436 self.rootnode.add_file(u"sick",
3437 upload.Data(DATA+"1",
3439 d.addCallback(_stash_uri, "sick")
3441 # this tests that deep-check and stream-manifest will ignore
3442 # UnknownNode instances. Hopefully this will also cover deep-stats.
3443 future_write_uri = "x-tahoe-crazy://I_am_from_the_future."
3444 future_read_uri = "x-tahoe-crazy-readonly://I_am_from_the_future."
3445 future_node = UnknownNode(future_write_uri, future_read_uri)
3446 d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
3448 def _clobber_shares(ignored):
3449 self.delete_shares_numbered(self.uris["sick"], [0,1])
3450 d.addCallback(_clobber_shares)
3458 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3461 units = [simplejson.loads(line)
3462 for line in res.splitlines()
3465 print "response is:", res
3466 print "undecodeable line was '%s'" % line
3468 self.failUnlessEqual(len(units), 5+1)
3469 # should be parent-first
3471 self.failUnlessEqual(u0["path"], [])
3472 self.failUnlessEqual(u0["type"], "directory")
3473 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3474 u0cr = u0["check-results"]
3475 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
3477 ugood = [u for u in units
3478 if u["type"] == "file" and u["path"] == [u"good"]][0]
3479 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3480 ugoodcr = ugood["check-results"]
3481 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
3484 self.failUnlessEqual(stats["type"], "stats")
3486 self.failUnlessEqual(s["count-immutable-files"], 2)
3487 self.failUnlessEqual(s["count-literal-files"], 1)
3488 self.failUnlessEqual(s["count-directories"], 1)
3489 self.failUnlessEqual(s["count-unknown"], 1)
3490 d.addCallback(_done)
3492 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3493 def _check_manifest(res):
3494 self.failUnless(res.endswith("\n"))
3495 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3496 self.failUnlessEqual(len(units), 5+1)
3497 self.failUnlessEqual(units[-1]["type"], "stats")
3499 self.failUnlessEqual(first["path"], [])
3500 self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
3501 self.failUnlessEqual(first["type"], "directory")
3502 stats = units[-1]["stats"]
3503 self.failUnlessEqual(stats["count-immutable-files"], 2)
3504 self.failUnlessEqual(stats["count-literal-files"], 1)
3505 self.failUnlessEqual(stats["count-mutable-files"], 0)
3506 self.failUnlessEqual(stats["count-immutable-files"], 2)
3507 self.failUnlessEqual(stats["count-unknown"], 1)
3508 d.addCallback(_check_manifest)
3510 # now add root/subdir and root/subdir/grandchild, then make subdir
3511 # unrecoverable, then see what happens
3513 d.addCallback(lambda ign:
3514 self.rootnode.create_subdirectory(u"subdir"))
3515 d.addCallback(_stash_uri, "subdir")
3516 d.addCallback(lambda subdir_node:
3517 subdir_node.add_file(u"grandchild",
3518 upload.Data(DATA+"2",
3520 d.addCallback(_stash_uri, "grandchild")
3522 d.addCallback(lambda ign:
3523 self.delete_shares_numbered(self.uris["subdir"],
3531 # root/subdir [unrecoverable]
3532 # root/subdir/grandchild
3534 # how should a streaming-JSON API indicate fatal error?
3535 # answer: emit ERROR: instead of a JSON string
3537 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3538 def _check_broken_manifest(res):
3539 lines = res.splitlines()
3541 for (i,line) in enumerate(lines)
3542 if line.startswith("ERROR:")]
3544 self.fail("no ERROR: in output: %s" % (res,))
3545 first_error = error_lines[0]
3546 error_line = lines[first_error]
3547 error_msg = lines[first_error+1:]
3548 error_msg_s = "\n".join(error_msg) + "\n"
3549 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3551 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3552 units = [simplejson.loads(line) for line in lines[:first_error]]
3553 self.failUnlessEqual(len(units), 6) # includes subdir
3554 last_unit = units[-1]
3555 self.failUnlessEqual(last_unit["path"], ["subdir"])
3556 d.addCallback(_check_broken_manifest)
3558 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3559 def _check_broken_deepcheck(res):
3560 lines = res.splitlines()
3562 for (i,line) in enumerate(lines)
3563 if line.startswith("ERROR:")]
3565 self.fail("no ERROR: in output: %s" % (res,))
3566 first_error = error_lines[0]
3567 error_line = lines[first_error]
3568 error_msg = lines[first_error+1:]
3569 error_msg_s = "\n".join(error_msg) + "\n"
3570 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3572 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3573 units = [simplejson.loads(line) for line in lines[:first_error]]
3574 self.failUnlessEqual(len(units), 6) # includes subdir
3575 last_unit = units[-1]
3576 self.failUnlessEqual(last_unit["path"], ["subdir"])
3577 r = last_unit["check-results"]["results"]
3578 self.failUnlessEqual(r["count-recoverable-versions"], 0)
3579 self.failUnlessEqual(r["count-shares-good"], 1)
3580 self.failUnlessEqual(r["recoverable"], False)
3581 d.addCallback(_check_broken_deepcheck)
3583 d.addErrback(self.explain_web_error)
def test_deep_check_and_repair(self):
    """t=stream-deep-check&repair=true: healthy files are left alone, a
    file missing one share is repaired back to 10 shares."""
    self.basedir = "web/Grid/deep_check_and_repair"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    self.fileurls = {}
    DATA = "data" * 100
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.rootnode = n
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
                                                     convergence="")))
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
                                                     convergence="")))
    d.addCallback(_stash_uri, "sick")
    #d.addCallback(lambda ign:
    #              self.rootnode.add_file(u"dead",
    #                                     upload.Data(DATA+"2",
    #                                                 convergence="")))
    #d.addCallback(_stash_uri, "dead")

    #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
    #d.addCallback(_stash_uri, "corrupt")

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        #dead_shares = self.find_shares(self.uris["dead"])
        #for i in range(1, 10):
        #    os.unlink(dead_shares[i][2])

        #c_shares = self.find_shares(self.uris["corrupt"])
        #cso = CorruptShareOptions()
        #cso.stdout = StringIO()
        #cso.parseOptions([c_shares[0][2]])
        #corrupt_share(cso)
    d.addCallback(_clobber_shares)

    # tree:
    # root
    # root/good  CHK, 10 shares
    # root/small LIT
    # root/sick  CHK, 9 shares

    d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
    def _done(res):
        units = [simplejson.loads(line)
                 for line in res.splitlines()
                 if line]
        self.failUnlessEqual(len(units), 4+1)
        # should be parent-first
        u0 = units[0]
        self.failUnlessEqual(u0["path"], [])
        self.failUnlessEqual(u0["type"], "directory")
        self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
        u0crr = u0["check-and-repair-results"]
        self.failUnlessEqual(u0crr["repair-attempted"], False)
        self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)

        ugood = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"good"]][0]
        self.failUnlessEqual(ugood["cap"], self.uris["good"])
        ugoodcrr = ugood["check-and-repair-results"]
        self.failUnlessEqual(ugoodcrr["repair-attempted"], False)
        self.failUnlessEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)

        usick = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"sick"]][0]
        self.failUnlessEqual(usick["cap"], self.uris["sick"])
        usickcrr = usick["check-and-repair-results"]
        self.failUnlessEqual(usickcrr["repair-attempted"], True)
        self.failUnlessEqual(usickcrr["repair-successful"], True)
        self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
        self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)

        stats = units[-1]
        self.failUnlessEqual(stats["type"], "stats")
        s = stats["stats"]
        self.failUnlessEqual(s["count-immutable-files"], 2)
        self.failUnlessEqual(s["count-literal-files"], 1)
        self.failUnlessEqual(s["count-directories"], 1)
    d.addCallback(_done)

    d.addErrback(self.explain_web_error)
    return d
def _count_leases(self, ignored, which):
    # Collect, for every share file of self.uris[which], the number of
    # leases that share currently holds. Designed as a Deferred callback
    # (the first argument is the ignored previous result); the list it
    # builds feeds _assert_leasecount.
    # NOTE(review): the "lease_counts = []" initializer and the trailing
    # "return lease_counts" are elided in this view — confirm against
    # the full source.
    u = self.uris[which]
    shares = self.find_shares(u)
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        # materialize the lease iterator just to count it
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
def _assert_leasecount(self, lease_counts, expected):
    """Fail the test unless every share file carries exactly `expected` leases.

    `lease_counts` is a list of (share_filename, num_leases) pairs, as
    produced by _count_leases. The failure message names the first
    offending share file.
    """
    bad = [pair for pair in lease_counts if pair[1] != expected]
    if bad:
        fn, num_leases = bad[0]
        self.fail("expected %d leases, have %d, on %s" %
                  (expected, num_leases, fn))
def test_add_lease(self):
    # Verify webapi lease semantics for t=check: a plain check never adds
    # a lease; check&add-lease=true from the *same* client merely renews
    # the existing lease (count stays at 1); only a client with different
    # lease-secrets (clientnum=1) adds a second lease.
    # NOTE(review): several original lines are elided in this view
    # (self.uris/self.fileurls initializers, a "clientnum=1)"
    # continuation near the end, and the trailing "return d").
    self.basedir = "web/Grid/add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        # remember the upload-results cap under a short name
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "two")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
    d.addCallback(_stash_mutable_uri, "mutable")

    def _compute_fileurls(ignored):
        # build a webapi URL for every stashed cap
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    # baseline: each object starts with exactly one lease
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "one", "t=check") # no add-lease
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)

    # a plain check must not change any lease counts
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses the original client, which uses the same
    # lease-secrets, so it will just renew the original lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses an alternate client, which adds a second lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # same client on the mutable file: renew only, counts unchanged
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # alternate client on the mutable file should add its second lease
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
    # NOTE(review): the continuation argument (presumably "clientnum=1)")
    # is elided in this view
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
    # NOTE(review): "return d" appears to be elided in this view
def test_deep_add_lease(self):
    # Like test_add_lease, but via the recursive t=stream-deep-check
    # traversal: add-lease=true issued by a *different* client must add a
    # second lease to every node in the tree (root dir, immutable "one",
    # mutable "mutable"); the same client only renews the existing lease.
    # NOTE(review): several original lines are elided in this view
    # (dict initializers, the self.rootnode stash, the closing paren of
    # upload.Data("literal", ...), the "def _done(res):" header, a
    # "clientnum=1)" continuation, and the trailing "return d").
    self.basedir = "web/Grid/deep_add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        # NOTE(review): the "self.rootnode = n" line appears elided here;
        # later callbacks rely on self.rootnode being set
        self.uris["root"] = n.get_uri()
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"one", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "one")
    # "small" is a literal file: it has no shares and thus no leases
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")

    d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
    d.addCallback(_stash_uri, "mutable")

    d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
    # NOTE(review): the "def _done(res):" header is elided here; the
    # streamed response is one JSON unit per line.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        # root, one, small, mutable, stats
        self.failUnlessEqual(len(units), 4+1)
    d.addCallback(_done)

    # plain deep-check must not change any lease counts
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # same client: add-lease merely renews, counts stay at 1
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # alternate client (different lease-secrets) adds a second lease
    # to every node visited by the traversal
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
    # NOTE(review): the continuation argument (presumably "clientnum=1)")
    # is elided in this view
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
    # NOTE(review): "return d" appears to be elided in this view
def test_exceptions(self):
    # Verify the webapi's error reporting: NoSharesError /
    # NotEnoughSharesError yield 410 Gone with a text/plain explanation,
    # missing children yield 404, unrecoverable directories render an
    # informative HTML page (or 410 for ?t=json), and unexpected server
    # exceptions yield 500 pages formatted per the request's Accept header.
    # NOTE(review): several original lines are elided in this view (the
    # "def _stash_root(n):" / "def _stash_bad(ur):" headers with their
    # "return" lines, "u = n.get_uri()" inside the mangle helpers,
    # "self.GET," continuation lines, the _set_up_boom callback wiring,
    # and the trailing "return d").
    self.basedir = "web/Grid/exceptions"
    self.set_up_grid(num_clients=1, num_servers=2)
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    # NOTE(review): the "def _stash_root(n):" header is elided here
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        # "imaginary" names a child that was never added: should 404
        self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
    d.addCallback(_stash_root)
    d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
    # NOTE(review): the "def _stash_bad(ur):" header is elided here
        # keep only share #0: enough to detect the file, not to recover it
        self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
        self.delete_shares_numbered(ur.uri, range(1,10))

        # flipping a bit in the readkey produces a URI with zero
        # matching shares anywhere on the grid
        u = uri.from_string(ur.uri)
        u.key = testutil.flip_bit(u.key, 0)
        baduri = u.to_string()
        self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
    d.addCallback(_stash_bad)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_1share(n):
        # NOTE(review): "u = n.get_uri()" appears elided here
        url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-1share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(1,10))
    d.addCallback(_mangle_dirnode_1share)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_0share(n):
        # NOTE(review): "u = n.get_uri()" appears elided here
        url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-0share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(0,10))
    d.addCallback(_mangle_dirnode_0share)

    # NotEnoughSharesError should be reported sensibly, with a
    # text/plain explanation of the problem, and perhaps some
    # information on which shares *could* be found.

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET unrecoverable",
                                       410, "Gone", "NoSharesError",
                                       self.GET, self.fileurls["0shares"]))
    def _check_zero_shares(body):
        # error body must be plain text, not an HTML page
        self.failIf("<html>" in body, body)
        # normalize whitespace before the exact-match comparison
        body = " ".join(body.strip().split())
        exp = ("NoSharesError: no shares could be found. "
               "Zero shares usually indicates a corrupt URI, or that "
               "no servers were connected, but it might also indicate "
               "severe corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is: "
               "Failed to get enough shareholders: have 0, need 3")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_zero_shares)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET 1share",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["1share"]))
    def _check_one_share(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NotEnoughSharesError: This indicates that some "
               "servers were unavailable, or that shares have been "
               "lost to server departure, hard drive failure, or disk "
               "corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is:"
               " Failed to get enough shareholders: have 1, need 3")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_one_share)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    def _missing_child(body):
        self.failUnless("No such child: imaginary" in body, body)
    d.addCallback(_missing_child)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
    def _check_0shares_dir_html(body):
        # an unrecoverable *directory* still renders a regular HTML page
        self.failUnless("<html>" in body, body)
        # we should see the regular page, but without the child table or
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        # NOTE(review): the closing "body)" continuation is elided here
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_0shares_dir_html)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
    def _check_1shares_dir_html(body):
        # at some point, we'll split UnrecoverableFileError into 0-shares
        # and some-shares like we did for immutable files (since there
        # are different sorts of advice to offer in each case). For now,
        # they present the same way.
        self.failUnless("<html>" in body, body)
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        # NOTE(review): the closing "body)" continuation is elided here
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_1shares_dir_html)

    # the JSON form of an unrecoverable directory is a hard 410 error
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-0share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       # NOTE(review): "self.GET," elided here
                                       self.fileurls["dir-0share-json"]))
    def _check_unrecoverable_file(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-1share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       # NOTE(review): "self.GET," elided here
                                       self.fileurls["dir-1share-json"]))
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))

    # attach a webapi child that throws a random error, to test how it
    # NOTE(review): the callback header wiring this setup into the chain
    # appears elided around here
    w = c0.getServiceNamed("webish")
    w.root.putChild("ERRORBOOM", ErrorBoom())

    # "Accept: */*" : should get a text/html stack trace
    # "Accept: text/plain" : should get a text/plain stack trace
    # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
    # no Accept header: should get a text/html stack trace

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_html",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["*/*"]}))
    def _internal_error_html1(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html1)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["text/plain"]}))
    def _internal_error_text2(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text2)

    # the CLI sends this compound Accept header; it must get plain text
    CLI_accepts = "text/plain, application/octet-stream"
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": [CLI_accepts]}))
    def _internal_error_text3(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text3)

    # no Accept header at all: default to HTML
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM"))
    def _internal_error_html4(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html4)

    def _flush_errors(res):
        # Trial: please ignore the CompletelyUnhandledError in the logs
        self.flushLoggedErrors(CompletelyUnhandledError)
        # NOTE(review): a "return res" line appears elided here
    d.addBoth(_flush_errors)
    # NOTE(review): "return d" appears to be elided in this view
# Marker exception raised by ErrorBoom to simulate an unexpected
# server-side failure; test_exceptions flushes it from the trial logs.
# NOTE(review): the class body line (presumably "pass") is elided in
# this view.
class CompletelyUnhandledError(Exception):
class ErrorBoom(rend.Page):
    # Minimal web resource that always blows up: test_exceptions mounts
    # it at /ERRORBOOM to verify the webapi's 500-error handling and its
    # Accept-header-sensitive stack-trace formatting.
    def beforeRender(self, ctx):
        # framework pre-render hook; raising here exercises the
        # unhandled-exception path of the web server
        raise CompletelyUnhandledError("whoops")