1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.nodemaker import NodeMaker
15 from allmydata.unknown import UnknownNode
16 from allmydata.web import status, common
17 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
18 from allmydata.util import fileutil, base32
19 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
20 create_chk_filenode, WebErrorMixin, ShouldFailMixin
21 from allmydata.interfaces import IMutableFileNode
22 from allmydata.mutable import servermap, publish, retrieve
23 import common_util as testutil
24 from allmydata.test.no_network import GridTestMixin
25 from allmydata.test.common_web import HTTPClientGETFactory, \
27 from allmydata.client import Client
29 # create a fake uploader/downloader, and a couple of fake dirnodes, then
30 # create a webserver that works against them
# trial per-test timeout override, in seconds.
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
class FakeStatsProvider:
    # Stub for the node's stats provider; the web status pages only need a
    # dict with 'stats' and 'counters' keys.
    # NOTE(review): a line is missing between the class header and the next
    # line in this view (presumably a 'def get_stats(self):' accessor); as
    # rendered, 'stats' reads as a class attribute -- confirm against the
    # original file.
    stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker stand-in that manufactures fake file nodes for web tests."""
    def _create_lit(self, cap):
        # LIT caps are handled by the same fake immutable node type.
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_uri(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for signature compatibility but unused here.
        return FakeMutableFileNode(None, None, None, None).create(contents)
class FakeUploader(service.Service):
    # Stand-in uploader: stores data in the FakeCHKFileNode registry via
    # create_chk_filenode() instead of touching a real grid.
    def upload(self, uploadable, history=None):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # NOTE(review): the callback header that consumes the read data
        # (presumably 'def _got_data(datav):' plus a join of the chunks) is
        # missing from this view; the three indented lines below are its body,
        # and the 'return results' / 'return d' lines are also missing.
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    def get_helper_info(self):
        # NOTE(review): body missing from this view.
    # NOTE(review): the enclosing class header (presumably 'class FakeHistory:')
    # is not visible in this view. These class-level lists hold one shared
    # status object per operation type, so every test sees the same counters.
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]

    # Accessors mirroring allmydata.history.History's interface; each returns
    # the shared list above (callers must not mutate it).
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        # NOTE(review): body missing from this view (presumably returns []).
class FakeClient(Client):
    # Client subclass wired entirely to the fake uploader/history/nodemaker
    # above, so the web frontend can be exercised without a real grid.
    # NOTE(review): the 'def __init__(self):' line is missing from this view;
    # the indented statements below are its body.
        # don't upcall to Client.__init__, since we only want to initialize a
        # NOTE(review): the rest of this comment is missing from this view.
        service.MultiService.__init__(self)
        self.nodeid = "fake_nodeid"
        self.nickname = "fake_nickname"
        self.introducer_furl = "None"
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = None
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(None, permute_peers=True)
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.setServiceParent(self)
        self.nodemaker = FakeNodeMaker(None, None, None,
                                       self.uploader, None, None,
                                       # NOTE(review): the remaining
                                       # constructor arguments and closing
                                       # paren are missing from this view.
    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
# convenience alias for the SDMF size cap enforced by the fake mutable nodes
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
class WebMixin(object):
    # Builds a FakeClient plus a running WebishServer, and populates a small
    # directory tree (public/foo/bar.txt, sub/baz.txt, etc.) for the tests.
    # NOTE(review): the 'def setUp(self):' line is missing from this view; the
    # statements below are its body, and several other lines (the '_then'
    # callback header, the 'foo = res[2][1]' assignments, the trailing
    # 'return d') are missing as well. Gaps are marked, not reconstructed.
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        self.ws.setServiceParent(self.s)
        # port "0" means the OS picks one; recover it from the listener
        self.webish_port = port = self.ws.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port

        # six dirnodes: public root, private root, foo, empty, sub, rodir
        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        # NOTE(review): the callback header (presumably 'def _then(res):') is
        # missing from this view.
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            # NOTE(review): the lines binding 'foo' (presumably
            # foo = res[2][1]; self._foo_node = foo) are missing from this view.
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri(),
                                     foo.get_readonly_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()

            foo.set_uri(u"empty", res[3][1].get_uri(),
                        res[3][1].get_readonly_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri, sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)

            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)

            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)

            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file, baz_file)

            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]

            # NOTE(review): the line binding 'rodir' (presumably
            # rodir = res[5][1]) is missing from this view.
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                                     rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file, baz_file)

            # resulting tree (partial; some comment lines missing from view):
            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"

            return foo.get_metadata_for(u"bar.txt")
        # NOTE(review): the 'd.addCallback(_then)' line is missing here.
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
192 def makefile(self, number):
193 contents = "contents of file %s\n" % number
194 n = create_chk_filenode(contents)
195 return contents, n, n.get_uri()
        # NOTE(review): the enclosing 'def tearDown(self):' line is missing
        # from this view; stopping the fake client also shuts down the
        # webish server child service.
        return self.s.stopService()
    def failUnlessIsBarDotTxt(self, res):
        # Assert that the response body is exactly the bar.txt fixture text.
        self.failUnlessEqual(res, self.BAR_CONTENTS, res)
203 def failUnlessIsBarJSON(self, res):
204 data = simplejson.loads(res)
205 self.failUnless(isinstance(data, list))
206 self.failUnlessEqual(data[0], u"filenode")
207 self.failUnless(isinstance(data[1], dict))
208 self.failIf(data[1]["mutable"])
209 self.failIf("rw_uri" in data[1]) # immutable
210 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
211 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
212 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # Assert that *res* is the t=json directory listing for 'foo'.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
        # NOTE(review): a line is missing from this view here (presumably a
        # '"children" in data[1]' check).
        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      # NOTE(review): the 'for (name, value)' line of this
                      # comprehension is missing from this view.
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
                             # NOTE(review): the expected value and closing
                             # parens are missing from this view.
    def GET(self, urlpath, followRedirect=False, return_response=False,
            # NOTE(review): the rest of the signature (presumably **kwargs and
            # the closing paren/colon) is missing from this view.
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        # NOTE(review): the 'if return_response:' guard and the
        # 'def _got_data(data):' callback header are missing from this view;
        # the two indented lines below belong to them.
                return (data, factory.status, factory.response_headers)
            d.addCallback(_got_data)
        return factory.deferred
    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        # NOTE(review): unlike GET(), this hands the bare urlpath (not
        # self.webish_url + urlpath) to the factory -- confirm intended; it
        # may work only because connectTCP targets localhost directly.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        # NOTE(review): the 'if return_response:' guard and callback header
        # lines are missing from this view.
                return (data, factory.status, factory.response_headers)
            d.addCallback(_got_data)
        return factory.deferred
274 def PUT(self, urlpath, data, **kwargs):
275 url = self.webish_url + urlpath
276 return client.getPage(url, method="PUT", postdata=data, **kwargs)
278 def DELETE(self, urlpath):
279 url = self.webish_url + urlpath
280 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Build and submit a multipart/form-data POST; each keyword in
        # *fields* becomes a form value, and (filename, value) tuples become
        # file-upload parts.
        # NOTE(review): several lines are missing from this view (the 'form'
        # list initialization, boundary/preamble appends, the else: branches,
        # and the trailing boundary); gaps are marked, not reconstructed.
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        # NOTE(review): 'form = [...]' preamble lines missing here.
        form.append('Content-Disposition: form-data; name="_charset"')
        # NOTE(review): charset value / separator appends missing here.
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
            # NOTE(review): the matching 'else:' line is missing here.
                form.append('Content-Disposition: form-data; name="%s"' % name)
            # NOTE(review): a blank-append line is missing here.
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            # NOTE(review): the 'else:' line is missing here.
                assert isinstance(value, str)
        # NOTE(review): value/closing-boundary appends missing here.
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
                   # NOTE(review): the rest of the headers dict and its closing
                   # brace are missing from this view.
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # Errback-style checker, used via d.addBoth(self.shouldFail, ...):
        # passes when *res* is a Failure wrapping *expected_failure* whose
        # text (and HTTP response body, if response_substring is given)
        # contains the expected substrings; otherwise fails the test.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            # NOTE(review): an 'if substring:' guard line is missing here.
            self.failUnless(substring in str(res),
                            "substring '%s' not in '%s'"
                            % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
        # NOTE(review): the 'else:' line is missing here.
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    def shouldFail2(self, expected_failure, which, substring,
                    # NOTE(review): a parameter line (presumably
                    # response_substring=None,) is missing from this view.
                    callable, *args, **kwargs):
        # Deferred-returning variant of shouldFail: invokes *callable* and
        # asserts that the resulting Deferred errbacks with *expected_failure*
        # carrying the given substrings.
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        # NOTE(review): the callback header (presumably 'def done(res):') is
        # missing from this view; the block below is its body.
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                # NOTE(review): an 'if substring:' guard line is missing here.
                self.failUnless(substring in str(res),
                                "%s: substring '%s' not in '%s'"
                                % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                    # NOTE(review): the first format argument
                                    # line ('% (which,') is missing here.
                                       response_substring, res.value.response))
            # NOTE(review): the 'else:' line is missing here.
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
    def should404(self, res, which):
        # Errback checker: expect a twisted.web error.Error with status "404".
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
        # NOTE(review): the 'else:' line is missing here.
            self.fail("%s was supposed to Error(404), not get '%s'" %
                      # NOTE(review): the format arguments and closing paren
                      # are missing from this view.
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    # End-to-end tests of the webish HTTP frontend, run against the fake
    # client/grid assembled by WebMixin's setUp.
    def test_create(self):
        # NOTE(review): body missing from this view.
    def test_welcome(self):
        # The root page should render the welcome banner, even when the
        # node's basedir exists on disk.
        # NOTE(review): the GET("/") call and the 'def _check(res):' header
        # are missing from this view, as is the trailing 'return d'.
            self.failUnless('Welcome To TahoeLAFS' in res, res)
            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
    def test_provisioning(self):
        # Walk the /provisioning tool through three form submissions and
        # check the rendered capacity estimates.
        # NOTE(review): several 'def _checkN(res):' callback headers, some
        # dict entries, and the trailing 'return d' are missing from this
        # view; gaps are marked, not reconstructed.
        d = self.GET("/provisioning/")
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      # NOTE(review): one dict entry is missing here.
                      "ownership_mode": "A",
                      "download_rate": 100,
                      # NOTE(review): remaining dict entries missing here.
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)

            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      # NOTE(review): one dict entry is missing here.
                      "download_rate": 1000,
                      # NOTE(review): remaining dict entries missing here.
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
    def test_reliability_tool(self):
        # /reliability requires NumPy; skip when the reliability module is
        # unavailable.
        # NOTE(review): the 'try:' and 'except ImportError:' lines around the
        # import are missing from this view, as are the callback headers and
        # some form-field entries and the trailing 'return d'.
        from allmydata import reliability
        _hush_pyflakes = reliability
        raise unittest.SkipTest("reliability tool requires NumPy")

        d = self.GET("/reliability/")
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      # NOTE(review): several dict entries missing here.
                      "check_period": "1M",
                      "report_period": "3M",
                      # NOTE(review): remaining dict entries missing here.
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
    def test_status(self):
        # The /status page and its per-operation subpages should reference
        # the counter numbers of the fake history's shared status objects.
        # NOTE(review): a few callback headers and the trailing 'return d'
        # are missing from this view.
        h = self.s.get_history()
        dl_num = h.list_all_download_statuses()[0].get_counter()
        ul_num = h.list_all_upload_statuses()[0].get_counter()
        mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = h.list_all_publish_statuses()[0].get_counter()
        ret_num = h.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
        # NOTE(review): 'def _check(res):' header missing from this view.
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)

        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
        # NOTE(review): 'def _check_dl(res):' header missing from this view.
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
        # NOTE(review): 'def _check_ul(res):' header missing from this view.
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
507 def test_status_numbers(self):
508 drrm = status.DownloadResultsRendererMixin()
509 self.failUnlessEqual(drrm.render_time(None, None), "")
510 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
511 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
512 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
513 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
514 self.failUnlessEqual(drrm.render_rate(None, None), "")
515 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
516 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
517 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
519 urrm = status.UploadResultsRendererMixin()
520 self.failUnlessEqual(urrm.render_time(None, None), "")
521 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
522 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
523 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
524 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
525 self.failUnlessEqual(urrm.render_rate(None, None), "")
526 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
527 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
528 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    # NOTE(review): throughout this extracted region, callback registrations
    # ('d.addCallback(_got)'), trailing arguments, and 'return d' statements
    # are missing from this view; gaps are marked, not reconstructed.
    def test_GET_FILEURL(self):
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_range(self):
        # a Range: header should yield a 206 partial response
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])

    def test_GET_FILEURL_partial_range(self):
        # an open-ended Range: header (bytes=5-) should serve the file tail
        headers = {"range": "bytes=5-"}
        length  = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])

    def test_HEAD_FILEURL_range(self):
        # HEAD with Range: returns 206 and the range metadata, empty body
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length  = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))

    def test_GET_FILEURL_range_bad(self):
        # a syntactically invalid Range: header should be rejected
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             # NOTE(review): the expected-status argument line
                             # is missing from this view.
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",
                             # NOTE(review): headers= argument missing here.

    def test_HEAD_FILEURL(self):
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])

    def test_GET_FILEURL_named(self):
        # /file/$CAP/@@name=/NAME and /named/$CAP/... serve the bytes under
        # an arbitrary filename, including via ?save=true
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    # NOTE(review): throughout this extracted region, some shouldFail2
    # argument lines, GET calls, and 'return d' statements are missing from
    # this view; gaps are marked, not reconstructed.
    def test_PUT_FILEURL_named_bad(self):
        # PUT to a /file/ URL is not allowed
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             # NOTE(review): expected-status line missing here.
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")

    def test_GET_DIRURL_named_bad(self):
        # /file/ with a directory cap is rejected
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             # NOTE(review): two argument lines missing here.
                             self.GET, base + "/@@name=/blah.txt")

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             # NOTE(review): expected-status line missing here.
                             "/file must be followed by a file-cap and a name",
                             # NOTE(review): callable arguments missing here.

    def test_GET_unhandled_URI_named(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "400 Bad Request", "is not a file-cap",
                             # NOTE(review): callable arguments missing here.

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             # NOTE(review): expected-status line missing here.
                             "GET unknown URI type: can only do t=info",
                             # NOTE(review): callable arguments missing here.

    def test_GET_FILE_URI(self):
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        # NOTE(review): the 'd = self.GET(base)' line is missing here.
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             # NOTE(review): callable arguments missing here.

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             # NOTE(review): callable arguments missing here.

    def test_GET_FILEURL_save(self):
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")

    def test_PUT_overwrite_only_files(self):
        # create a directory, put a file in that directory.
        contents, n, filecap = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/dir/file1.txt",
                               self.NEWFILE_CONTENTS))
        # try to overwrite the file with replace=only-files
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
                               # NOTE(review): payload argument missing here.
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                       "There was already a child by that name, and you asked me "
                                       # NOTE(review): rest of message missing.
                                       self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
                                       # NOTE(review): payload argument missing.

    def test_PUT_NEWFILEURL(self):
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_not_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_range_bad(self):
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
                             # NOTE(review): headers= argument missing here.
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))
    # NOTE(review): throughout this extracted region, callback headers,
    # argument lines, and 'return d' statements are missing from this view;
    # gaps are marked, not reconstructed.
    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        # NOTE(review): 'def _check_uri(res):' header missing from this view.
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             # NOTE(review):
                                                             # childname arg
                                                             # missing here.
                                                             self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        # SDMF is single-segment, so a body over MUTABLE_SIZELIMIT must be
        # rejected with 413
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             # NOTE(review): the 'self.PUT,' line is missing.
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))

    def test_PUT_NEWFILEURL_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
                             # NOTE(review): payload argument missing here.

    def test_PUT_NEWFILEURL_no_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  # NOTE(review): expected-status line missing here.
                  "There was already a child by that name, and you asked me "
                  # NOTE(review): rest of message missing here.

    def test_PUT_NEWFILEURL_mkdirs(self):
        # intermediate directories are created on demand
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        # NOTE(review): the 'fn = self._foo_node' binding is missing here.
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_blocked(self):
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  # NOTE(review): expected-status line missing here.
                  "Unable to create directory 'blockingfile': a file was in the way")

    def test_DELETE_FILEURL(self):
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")

    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")

    def test_GET_FILEURL_uri(self):
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
        # NOTE(review): 'def _check(res):' header missing from this view.
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
        # NOTE(review): 'def _check2(res):' header missing from this view.
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 # NOTE(review): argument lines missing here.
                                 self.public_url + "/foo/bar.txt?t=bogus")

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
887 def test_GET_DIRURL(self):
888 # the addSlash means we get a redirect here
889 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
891 d = self.GET(self.public_url + "/foo", followRedirect=True)
893 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
895 # the FILE reference points to a URI, but it should end in bar.txt
896 bar_url = ("%s/file/%s/@@named=/bar.txt" %
897 (ROOT, urllib.quote(self._bar_txt_uri)))
898 get_bar = "".join([r'<td>FILE</td>',
900 r'<a href="%s">bar.txt</a>' % bar_url,
902 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
904 self.failUnless(re.search(get_bar, res), res)
905 for line in res.split("\n"):
906 # find the line that contains the delete button for bar.txt
907 if ("form action" in line and
908 'value="delete"' in line and
909 'value="bar.txt"' in line):
910 # the form target should use a relative URL
911 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
912 self.failUnless(('action="%s"' % foo_url) in line, line)
913 # and the when_done= should too
914 #done_url = urllib.quote(???)
915 #self.failUnless(('name="when_done" value="%s"' % done_url)
919 self.fail("unable to find delete-bar.txt line", res)
921 # the DIR reference just points to a URI
922 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
923 get_sub = ((r'<td>DIR</td>')
924 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
925 self.failUnless(re.search(get_sub, res), res)
926 d.addCallback(_check)
928 # look at a directory which is readonly
929 d.addCallback(lambda res:
930 self.GET(self.public_url + "/reedownlee", followRedirect=True))
932 self.failUnless("(read-only)" in res, res)
933 self.failIf("Upload a file" in res, res)
934 d.addCallback(_check2)
936 # and at a directory that contains a readonly directory
937 d.addCallback(lambda res:
938 self.GET(self.public_url, followRedirect=True))
940 self.failUnless(re.search('<td>DIR-RO</td>'
941 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
942 d.addCallback(_check3)
944 # and an empty directory
945 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
947 self.failUnless("directory is empty" in res, res)
948 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
949 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
950 d.addCallback(_check4)
# GET on a directory URL with an unknown t= query argument must produce an
# HTTP error (listing is elided here: original line numbers jump).
954 def test_GET_DIRURL_badtype(self):
955 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
959 self.public_url + "/foo?t=bogus")
def test_GET_DIRURL_json(self):
    # t=json on a directory should render the JSON description of /foo.
    fetching = self.GET(self.public_url + "/foo?t=json")
    fetching.addCallback(self.failUnlessIsFooJSON)
# t=start-manifest is a slow operation: without ophandle= it must be rejected.
968 def test_POST_DIRURL_manifest_no_ophandle(self):
969 d = self.shouldFail2(error.Error,
970 "test_POST_DIRURL_manifest_no_ophandle",
972 "slow operation requires ophandle=",
973 self.POST, self.public_url, t="start-manifest")
# Start a t=start-manifest operation with ophandle=125, poll /operations/125
# until finished, then check the manifest in html, text, and JSON forms.
# NOTE(review): listing is elided here (line numbers jump); some statements
# such as the POST keyword arguments and helper bodies are not shown.
976 def test_POST_DIRURL_manifest(self):
977 d = defer.succeed(None)
978 def getman(ignored, output):
979 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
981 d.addCallback(self.wait_for_operation, "125")
982 d.addCallback(self.get_operation_results, "125", output)
984 d.addCallback(getman, None)
985 def _got_html(manifest):
986 self.failUnless("Manifest of SI=" in manifest)
987 self.failUnless("<td>sub</td>" in manifest)
988 self.failUnless(self._sub_uri in manifest)
989 self.failUnless("<td>sub/baz.txt</td>" in manifest)
990 d.addCallback(_got_html)
992 # both t=status and unadorned GET should be identical
993 d.addCallback(lambda res: self.GET("/operations/125"))
994 d.addCallback(_got_html)
996 d.addCallback(getman, "html")
997 d.addCallback(_got_html)
998 d.addCallback(getman, "text")
999 def _got_text(manifest):
1000 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1001 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1002 d.addCallback(_got_text)
1003 d.addCallback(getman, "JSON")
1005 data = res["manifest"]
1007 for (path_list, cap) in data:
1008 got[tuple(path_list)] = cap
1009 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1010 self.failUnless((u"sub",u"baz.txt") in got)
1011 self.failUnless("finished" in res)
1012 self.failUnless("origin" in res)
1013 self.failUnless("storage-index" in res)
1014 self.failUnless("verifycaps" in res)
1015 self.failUnless("stats" in res)
1016 d.addCallback(_got_json)
# t=start-deep-size without ophandle= must be rejected as a slow operation.
1019 def test_POST_DIRURL_deepsize_no_ophandle(self):
1020 d = self.shouldFail2(error.Error,
1021 "test_POST_DIRURL_deepsize_no_ophandle",
1023 "slow operation requires ophandle=",
1024 self.POST, self.public_url, t="start-deep-size")
# Start a deep-size operation, poll it, and verify the reported total size
# in both JSON and text output forms (directory sizes vary, so only a
# lower bound is asserted).
1027 def test_POST_DIRURL_deepsize(self):
1028 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1029 followRedirect=True)
1030 d.addCallback(self.wait_for_operation, "126")
1031 d.addCallback(self.get_operation_results, "126", "json")
1032 def _got_json(data):
1033 self.failUnlessEqual(data["finished"], True)
1035 self.failUnless(size > 1000)
1036 d.addCallback(_got_json)
1037 d.addCallback(self.get_operation_results, "126", "text")
1039 mo = re.search(r'^size: (\d+)$', res, re.M)
1040 self.failUnless(mo, res)
1041 size = int(mo.group(1))
1042 # with directories, the size varies.
1043 self.failUnless(size > 1000)
1044 d.addCallback(_got_text)
# t=start-deep-stats without ophandle= must be rejected as a slow operation.
1047 def test_POST_DIRURL_deepstats_no_ophandle(self):
1048 d = self.shouldFail2(error.Error,
1049 "test_POST_DIRURL_deepstats_no_ophandle",
1051 "slow operation requires ophandle=",
1052 self.POST, self.public_url, t="start-deep-stats")
# Start a deep-stats operation and compare the stable fields of the JSON
# results against known values for the test grid layout (size-dependent
# fields are commented out because they vary).
1055 def test_POST_DIRURL_deepstats(self):
1056 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1057 followRedirect=True)
1058 d.addCallback(self.wait_for_operation, "127")
1059 d.addCallback(self.get_operation_results, "127", "json")
1060 def _got_json(stats):
1061 expected = {"count-immutable-files": 3,
1062 "count-mutable-files": 0,
1063 "count-literal-files": 0,
1065 "count-directories": 3,
1066 "size-immutable-files": 57,
1067 "size-literal-files": 0,
1068 #"size-directories": 1912, # varies
1069 #"largest-directory": 1590,
1070 "largest-directory-children": 5,
1071 "largest-immutable-file": 19,
1073 for k,v in expected.iteritems():
1074 self.failUnlessEqual(stats[k], v,
1075 "stats[%s] was %s, not %s" %
1077 self.failUnlessEqual(stats["size-files-histogram"],
1079 d.addCallback(_got_json)
# t=stream-manifest returns newline-separated JSON units; the last unit is
# the "stats" record, the first describes the root directory itself.
1082 def test_POST_DIRURL_stream_manifest(self):
1083 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1085 self.failUnless(res.endswith("\n"))
1086 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1087 self.failUnlessEqual(len(units), 7)
1088 self.failUnlessEqual(units[-1]["type"], "stats")
1090 self.failUnlessEqual(first["path"], [])
1091 self.failUnlessEqual(first["cap"], self._foo_uri)
1092 self.failUnlessEqual(first["type"], "directory")
1093 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1094 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1095 self.failIfEqual(baz["storage-index"], None)
1096 self.failIfEqual(baz["verifycap"], None)
1097 self.failIfEqual(baz["repaircap"], None)
1099 d.addCallback(_check)
# t=uri on a directory returns the directory's write-URI as the body.
1102 def test_GET_DIRURL_uri(self):
1103 d = self.GET(self.public_url + "/foo?t=uri")
1105 self.failUnlessEqual(res, self._foo_uri)
1106 d.addCallback(_check)
# t=readonly-uri on a directory returns the read-only cap as the body.
1109 def test_GET_DIRURL_readonly_uri(self):
1110 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1112 self.failUnlessEqual(res, self._foo_readonly_uri)
1113 d.addCallback(_check)
def test_PUT_NEWDIRURL(self):
    # PUT ...?t=mkdir creates an empty directory named 'newdir' under /foo.
    target = self.public_url + "/foo/newdir?t=mkdir"
    d = self.PUT(target, "")
    def _created(ign):
        return self.failUnlessNodeHasChild(self._foo_node, u"newdir")
    d.addCallback(_created)
    d.addCallback(lambda ign: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_PUT_NEWDIRURL_exists(self):
    # PUT t=mkdir on an already-existing directory leaves its children intact.
    target = self.public_url + "/foo/sub?t=mkdir"
    d = self.PUT(target, "")
    def _still_there(ign):
        return self.failUnlessNodeHasChild(self._foo_node, u"sub")
    d.addCallback(_still_there)
    d.addCallback(lambda ign: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir through a path component that is an existing FILE must fail with
# 409 Conflict, and must not disturb the existing 'sub' directory.
1132 def test_PUT_NEWDIRURL_blocked(self):
1133 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1134 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1136 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1137 d.addCallback(lambda res:
1138 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1139 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1140 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p&path=/sub1/sub2 creates intermediate directories and must be
# idempotent: a second call returns the same subdirectory cap.
# NOTE(review): listing is elided here (line numbers jump); the second
# mkdir-p invocation is among the missing statements.
1143 def test_PUT_NEWDIRURL_mkdir_p(self):
1144 d = defer.succeed(None)
1145 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1146 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1147 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1148 def mkdir_p(mkpnode):
1149 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1151 def made_subsub(ssuri):
1152 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1153 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1155 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1157 d.addCallback(made_subsub)
1159 d.addCallback(mkdir_p)
def test_PUT_NEWDIRURL_mkdirs(self):
    # A deep t=mkdir creates 'newdir' inside a new 'subdir', not under /foo.
    d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
    def _not_in_foo(ign):
        return self.failIfNodeHasChild(self._foo_node, u"newdir")
    d.addCallback(_not_in_foo)
    def _subdir_created(ign):
        return self.failUnlessNodeHasChild(self._foo_node, u"subdir")
    d.addCallback(_subdir_created)
    d.addCallback(lambda ign:
                  self._foo_node.get_child_at_path(u"subdir/newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_DELETE_DIRURL(self):
    # DELETE on a directory URL unlinks the directory from its parent.
    deleting = self.DELETE(self.public_url + "/foo")
    def _gone(ign):
        return self.failIfNodeHasChild(self.public_root, u"foo")
    deleting.addCallback(_gone)
def test_DELETE_DIRURL_missing(self):
    # DELETE of a nonexistent child yields 404 and leaves the parent intact.
    deleting = self.DELETE(self.public_url + "/foo/missing")
    deleting.addBoth(self.should404, "test_DELETE_DIRURL_missing")
    def _parent_intact(ign):
        return self.failUnlessNodeHasChild(self.public_root, u"foo")
    deleting.addCallback(_parent_intact)
def test_DELETE_DIRURL_missing2(self):
    # DELETE of a nonexistent top-level name should also yield a 404.
    deleting = self.DELETE(self.public_url + "/missing")
    deleting.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debugging helper: walk the whole public root with DirnodeWalkerMixin.
# NOTE(review): listing is elided here (line numbers jump); the visitor
# body and the print statements are not shown.
1191 def dump_root(self):
1193 w = webish.DirnodeWalkerMixin()
1194 def visitor(childpath, childnode, metadata):
1196 d = w.walk(self.public_root, visitor)
# Assert that 'node' has exactly the given (unicode) child names.
# NOTE(review): the statement that lists the node's children (presumably
# d = node.list()) is elided from this listing — confirm against original.
1199 def failUnlessNodeKeysAre(self, node, expected_keys):
1200 for k in expected_keys:
1201 assert isinstance(k, unicode)
1203 def _check(children):
1204 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1205 d.addCallback(_check)
# Assert that 'node' has a child with the given (unicode) name.
# NOTE(review): the child-listing statement is elided from this listing.
1207 def failUnlessNodeHasChild(self, node, name):
1208 assert isinstance(name, unicode)
1210 def _check(children):
1211 self.failUnless(name in children)
1212 d.addCallback(_check)
# Assert that 'node' does NOT have a child with the given (unicode) name.
# NOTE(review): the child-listing statement is elided from this listing.
1214 def failIfNodeHasChild(self, node, name):
1215 assert isinstance(name, unicode)
1217 def _check(children):
1218 self.failIf(name in children)
1219 d.addCallback(_check)
def failUnlessChildContentsAre(self, node, name, expected_contents):
    # Fetch the (immutable) child at 'name' and compare its full contents.
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda child: child.download_to_data())
    def _compare(data):
        self.failUnlessEqual(data, expected_contents)
    d.addCallback(_compare)
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
    # Fetch the mutable child at 'name' and compare its best-version contents.
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda child: child.download_best_version())
    def _compare(data):
        self.failUnlessEqual(data, expected_contents)
    d.addCallback(_compare)
# Assert that the child at 'name' has the expected URI (whitespace-stripped).
# NOTE(review): the `def _check(child):` line is elided from this listing.
1240 def failUnlessChildURIIs(self, node, name, expected_uri):
1241 assert isinstance(name, unicode)
1242 d = node.get_child_at_path(name)
1244 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1245 d.addCallback(_check)
# Assert that a URI returned by the webapi matches the child node's URI.
# NOTE(review): the `def _check(child):` line is elided from this listing.
1248 def failUnlessURIMatchesChild(self, got_uri, node, name):
1249 assert isinstance(name, unicode)
1250 d = node.get_child_at_path(name)
1252 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1253 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake CHK store maps got_uri to exactly `contents`.

    Uses failUnlessEqual instead of failUnless(a == b) so a mismatch
    reports both values instead of a bare 'false is not true'.
    """
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
# POST t=upload with a file= field adds an immutable child 'new.txt' to /foo.
# NOTE(review): the binding `fn = self._foo_node` is elided from this listing.
1259 def test_POST_upload(self):
1260 d = self.POST(self.public_url + "/foo", t="upload",
1261 file=("new.txt", self.NEWFILE_CONTENTS))
1263 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1264 d.addCallback(lambda res:
1265 self.failUnlessChildContentsAre(fn, u"new.txt",
1266 self.NEWFILE_CONTENTS))
# Upload a file whose name contains a non-ASCII character; the child must be
# retrievable both through the node API and via a UTF-8-encoded URL.
1269 def test_POST_upload_unicode(self):
1270 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1271 d = self.POST(self.public_url + "/foo", t="upload",
1272 file=(filename, self.NEWFILE_CONTENTS))
1274 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1275 d.addCallback(lambda res:
1276 self.failUnlessChildContentsAre(fn, filename,
1277 self.NEWFILE_CONTENTS))
1278 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1279 d.addCallback(lambda res: self.GET(target_url))
1280 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1281 self.NEWFILE_CONTENTS,
# Same as test_POST_upload_unicode, but the unicode name comes from the
# name= form field, which overrides the filename in the file= part.
1285 def test_POST_upload_unicode_named(self):
1286 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1287 d = self.POST(self.public_url + "/foo", t="upload",
1289 file=("overridden", self.NEWFILE_CONTENTS))
1291 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1292 d.addCallback(lambda res:
1293 self.failUnlessChildContentsAre(fn, filename,
1294 self.NEWFILE_CONTENTS))
1295 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1296 d.addCallback(lambda res: self.GET(target_url))
1297 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1298 self.NEWFILE_CONTENTS,
# POST t=upload to /uri (no parent directory): the response is an
# "Upload Results" page from which the new file's URI is scraped.
1302 def test_POST_upload_no_link(self):
1303 d = self.POST("/uri", t="upload",
1304 file=("new.txt", self.NEWFILE_CONTENTS))
1305 def _check_upload_results(page):
1306 # this should be a page which describes the results of the upload
1307 # that just finished.
1308 self.failUnless("Upload Results:" in page)
1309 self.failUnless("URI:" in page)
1310 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1311 mo = uri_re.search(page)
1312 self.failUnless(mo, page)
1313 new_uri = mo.group(1)
1315 d.addCallback(_check_upload_results)
1316 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
def test_POST_upload_no_link_whendone(self):
    # An unlinked upload with when_done= should redirect to that URL.
    uploading = self.POST("/uri", t="upload", when_done="/",
                          file=("new.txt", self.NEWFILE_CONTENTS))
    uploading.addBoth(self.shouldRedirect, "/")
# Invoke 'callable', expect it to fail with PageRedirect, and hand the
# redirect's status code and Location target to 'checker'. Fails the test
# if the callable returns normally instead of redirecting.
1325 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1326 d = defer.maybeDeferred(callable, *args, **kwargs)
1328 if isinstance(res, failure.Failure):
1329 res.trap(error.PageRedirect)
1330 statuscode = res.value.status
1331 target = res.value.location
1332 return checker(statuscode, target)
1333 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done="/uri/%(uri)s" interpolates the new file's URI into the
# redirect target; following the redirect must fetch the uploaded contents.
1338 def test_POST_upload_no_link_whendone_results(self):
1339 def check(statuscode, target):
1340 self.failUnlessEqual(statuscode, str(http.FOUND))
1341 self.failUnless(target.startswith(self.webish_url), target)
1342 return client.getPage(target, method="GET")
1343 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1345 self.POST, "/uri", t="upload",
1346 when_done="/uri/%(uri)s",
1347 file=("new.txt", self.NEWFILE_CONTENTS))
1348 d.addCallback(lambda res:
1349 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
# An unlinked upload with mutable=true returns a URI:SSK: write cap; the
# contents must then be reachable via the node API, /uri/CAP, and /file/CAP.
# NOTE(review): the `def _check2/_check3/_check4(data):` header lines are
# elided from this listing.
1352 def test_POST_upload_no_link_mutable(self):
1353 d = self.POST("/uri", t="upload", mutable="true",
1354 file=("new.txt", self.NEWFILE_CONTENTS))
1355 def _check(filecap):
1356 filecap = filecap.strip()
1357 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1358 self.filecap = filecap
1359 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1360 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
1361 n = self.s.create_node_from_uri(filecap)
1362 return n.download_best_version()
1363 d.addCallback(_check)
1365 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1366 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1367 d.addCallback(_check2)
1369 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1370 return self.GET("/file/%s" % urllib.quote(self.filecap))
1371 d.addCallback(_check3)
1373 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1374 d.addCallback(_check4)
# A mutable upload exceeding MUTABLE_SIZELIMIT must be rejected with 413.
1377 def test_POST_upload_no_link_mutable_toobig(self):
1378 d = self.shouldFail2(error.Error,
1379 "test_POST_upload_no_link_mutable_toobig",
1380 "413 Request Entity Too Large",
1381 "SDMF is limited to one segment, and 10001 > 10000",
1383 "/uri", t="upload", mutable="true",
1385 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# End-to-end test of mutable-file upload through the webapi:
#  1. create a mutable file via POST t=upload mutable=true
#  2. re-upload (POST, then PUT) and verify the URI stays the same
#  3. check directory listing, directory JSON, file JSON, t=uri /
#     t=readonly-uri, /uri/CAP access, HEAD headers, and over-size errors.
# NOTE(review): listing is elided throughout (line numbers jump); several
# helper headers (e.g. `def _got(newnode):`) and bindings are not shown.
1388 def test_POST_upload_mutable(self):
1389 # this creates a mutable file
1390 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1391 file=("new.txt", self.NEWFILE_CONTENTS))
1393 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1394 d.addCallback(lambda res:
1395 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1396 self.NEWFILE_CONTENTS))
1397 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1399 self.failUnless(IMutableFileNode.providedBy(newnode))
1400 self.failUnless(newnode.is_mutable())
1401 self.failIf(newnode.is_readonly())
1402 self._mutable_node = newnode
1403 self._mutable_uri = newnode.get_uri()
1406 # now upload it again and make sure that the URI doesn't change
1407 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1408 d.addCallback(lambda res:
1409 self.POST(self.public_url + "/foo", t="upload",
1411 file=("new.txt", NEWER_CONTENTS)))
1412 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1413 d.addCallback(lambda res:
1414 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1416 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1418 self.failUnless(IMutableFileNode.providedBy(newnode))
1419 self.failUnless(newnode.is_mutable())
1420 self.failIf(newnode.is_readonly())
1421 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1422 d.addCallback(_got2)
1424 # upload a second time, using PUT instead of POST
1425 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1426 d.addCallback(lambda res:
1427 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1428 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1429 d.addCallback(lambda res:
1430 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1433 # finally list the directory, since mutable files are displayed
1434 # slightly differently
1436 d.addCallback(lambda res:
1437 self.GET(self.public_url + "/foo/",
1438 followRedirect=True))
1439 def _check_page(res):
1440 # TODO: assert more about the contents
1441 self.failUnless("SSK" in res)
1443 d.addCallback(_check_page)
1445 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1447 self.failUnless(IMutableFileNode.providedBy(newnode))
1448 self.failUnless(newnode.is_mutable())
1449 self.failIf(newnode.is_readonly())
1450 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1451 d.addCallback(_got3)
1453 # look at the JSON form of the enclosing directory
1454 d.addCallback(lambda res:
1455 self.GET(self.public_url + "/foo/?t=json",
1456 followRedirect=True))
1457 def _check_page_json(res):
1458 parsed = simplejson.loads(res)
1459 self.failUnlessEqual(parsed[0], "dirnode")
1460 children = dict( [(unicode(name),value)
1462 in parsed[1]["children"].iteritems()] )
1463 self.failUnless("new.txt" in children)
1464 new_json = children["new.txt"]
1465 self.failUnlessEqual(new_json[0], "filenode")
1466 self.failUnless(new_json[1]["mutable"])
1467 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1468 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1469 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1470 d.addCallback(_check_page_json)
1472 # and the JSON form of the file
1473 d.addCallback(lambda res:
1474 self.GET(self.public_url + "/foo/new.txt?t=json"))
1475 def _check_file_json(res):
1476 parsed = simplejson.loads(res)
1477 self.failUnlessEqual(parsed[0], "filenode")
1478 self.failUnless(parsed[1]["mutable"])
1479 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1480 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1481 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1482 d.addCallback(_check_file_json)
1484 # and look at t=uri and t=readonly-uri
1485 d.addCallback(lambda res:
1486 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1487 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1488 d.addCallback(lambda res:
1489 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1490 def _check_ro_uri(res):
1491 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1492 self.failUnlessEqual(res, ro_uri)
1493 d.addCallback(_check_ro_uri)
1495 # make sure we can get to it from /uri/URI
1496 d.addCallback(lambda res:
1497 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1498 d.addCallback(lambda res:
1499 self.failUnlessEqual(res, NEW2_CONTENTS))
1501 # and that HEAD computes the size correctly
1502 d.addCallback(lambda res:
1503 self.HEAD(self.public_url + "/foo/new.txt",
1504 return_response=True))
1505 def _got_headers((res, status, headers)):
1506 self.failUnlessEqual(res, "")
1507 self.failUnlessEqual(headers["content-length"][0],
1508 str(len(NEW2_CONTENTS)))
1509 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1510 d.addCallback(_got_headers)
1512 # make sure that size errors are displayed correctly for overwrite
1513 d.addCallback(lambda res:
1514 self.shouldFail2(error.Error,
1515 "test_POST_upload_mutable-toobig",
1516 "413 Request Entity Too Large",
1517 "SDMF is limited to one segment, and 10001 > 10000",
1519 self.public_url + "/foo", t="upload",
1522 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1525 d.addErrback(self.dump_error)
# A linked mutable upload exceeding MUTABLE_SIZELIMIT must be rejected
# with 413 Request Entity Too Large.
1528 def test_POST_upload_mutable_toobig(self):
1529 d = self.shouldFail2(error.Error,
1530 "test_POST_upload_mutable_toobig",
1531 "413 Request Entity Too Large",
1532 "SDMF is limited to one segment, and 10001 > 10000",
1534 self.public_url + "/foo",
1535 t="upload", mutable="true",
1537 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# Errback helper: print the HTTP response body hidden inside a
# twisted.web.error.Error so trial failures are informative.
1540 def dump_error(self, f):
1541 # if the web server returns an error code (like 400 Bad Request),
1542 # web.client.getPage puts the HTTP response body into the .response
1543 # attribute of the exception object that it gives back. It does not
1544 # appear in the Failure's repr(), so the ERROR that trial displays
1545 # will be rather terse and unhelpful. addErrback this method to the
1546 # end of your chain to get more information out of these errors.
1547 if f.check(error.Error):
1548 print "web.error.Error:"
1550 print f.value.response
# Uploading over an existing child name ('bar.txt') replaces its contents.
# NOTE(review): the binding `fn = self._foo_node` is elided from this listing.
1553 def test_POST_upload_replace(self):
1554 d = self.POST(self.public_url + "/foo", t="upload",
1555 file=("bar.txt", self.NEWFILE_CONTENTS))
1557 d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
1558 d.addCallback(lambda res:
1559 self.failUnlessChildContentsAre(fn, u"bar.txt",
1560 self.NEWFILE_CONTENTS))
def test_POST_upload_no_replace_ok(self):
    # replace=false is harmless when no child with that name exists yet.
    url = self.public_url + "/foo?replace=false"
    d = self.POST(url, t="upload", file=("new.txt", self.NEWFILE_CONTENTS))
    def _fetch(ign):
        return self.GET(self.public_url + "/foo/new.txt")
    d.addCallback(_fetch)
    def _compare(body):
        self.failUnlessEqual(body, self.NEWFILE_CONTENTS)
    d.addCallback(_compare)
# Uploading over an existing child with ?replace=false must fail and must
# leave the original bar.txt contents untouched.
1571 def test_POST_upload_no_replace_queryarg(self):
1572 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1573 file=("bar.txt", self.NEWFILE_CONTENTS))
1574 d.addBoth(self.shouldFail, error.Error,
1575 "POST_upload_no_replace_queryarg",
1577 "There was already a child by that name, and you asked me "
1578 "to not replace it")
1579 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1580 d.addCallback(self.failUnlessIsBarDotTxt)
# Same as the queryarg variant, but replace=false is sent as a form field.
1583 def test_POST_upload_no_replace_field(self):
1584 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1585 file=("bar.txt", self.NEWFILE_CONTENTS))
1586 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1588 "There was already a child by that name, and you asked me "
1589 "to not replace it")
1590 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1591 d.addCallback(self.failUnlessIsBarDotTxt)
# A linked upload with when_done= should redirect, and the child must still
# have been added. NOTE(review): `fn = self._foo_node` is elided here.
1594 def test_POST_upload_whendone(self):
1595 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1596 file=("new.txt", self.NEWFILE_CONTENTS))
1597 d.addBoth(self.shouldRedirect, "/THERE")
1599 d.addCallback(lambda res:
1600 self.failUnlessChildContentsAre(fn, u"new.txt",
1601 self.NEWFILE_CONTENTS))
# The name= form field supplies the child name when file= has no filename.
# NOTE(review): the binding `fn = self._foo_node` is elided from this listing.
1604 def test_POST_upload_named(self):
1606 d = self.POST(self.public_url + "/foo", t="upload",
1607 name="new.txt", file=self.NEWFILE_CONTENTS)
1608 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1609 d.addCallback(lambda res:
1610 self.failUnlessChildContentsAre(fn, u"new.txt",
1611 self.NEWFILE_CONTENTS))
# name= values containing a slash must be rejected, and /foo must end up
# with exactly its original children (the expected-keys list is elided).
1614 def test_POST_upload_named_badfilename(self):
1615 d = self.POST(self.public_url + "/foo", t="upload",
1616 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1617 d.addBoth(self.shouldFail, error.Error,
1618 "test_POST_upload_named_badfilename",
1620 "name= may not contain a slash",
1622 # make sure that nothing was added
1623 d.addCallback(lambda res:
1624 self.failUnlessNodeKeysAre(self._foo_node,
1625 [u"bar.txt", u"blockingfile",
1626 u"empty", u"n\u00fc.txt",
# t=check on a healthy file: check the HTML results page, the when_done=
# redirect, the return_to= link, and the JSON output form.
# NOTE(review): listing is elided here (line numbers jump); several helper
# headers (e.g. `def _check(res):`) are not shown.
1630 def test_POST_FILEURL_check(self):
1631 bar_url = self.public_url + "/foo/bar.txt"
1632 d = self.POST(bar_url, t="check")
1634 self.failUnless("Healthy :" in res)
1635 d.addCallback(_check)
1636 redir_url = "http://allmydata.org/TARGET"
1637 def _check2(statuscode, target):
1638 self.failUnlessEqual(statuscode, str(http.FOUND))
1639 self.failUnlessEqual(target, redir_url)
1640 d.addCallback(lambda res:
1641 self.shouldRedirect2("test_POST_FILEURL_check",
1645 when_done=redir_url))
1646 d.addCallback(lambda res:
1647 self.POST(bar_url, t="check", return_to=redir_url))
1649 self.failUnless("Healthy :" in res)
1650 self.failUnless("Return to file" in res)
1651 self.failUnless(redir_url in res)
1652 d.addCallback(_check3)
1654 d.addCallback(lambda res:
1655 self.POST(bar_url, t="check", output="JSON"))
1656 def _check_json(res):
1657 data = simplejson.loads(res)
1658 self.failUnless("storage-index" in data)
1659 self.failUnless(data["results"]["healthy"])
1660 d.addCallback(_check_json)
# t=check&repair=true on a healthy file: results page, when_done= redirect,
# and return_to= link must all behave as for a plain check.
1664 def test_POST_FILEURL_check_and_repair(self):
1665 bar_url = self.public_url + "/foo/bar.txt"
1666 d = self.POST(bar_url, t="check", repair="true")
1668 self.failUnless("Healthy :" in res)
1669 d.addCallback(_check)
1670 redir_url = "http://allmydata.org/TARGET"
1671 def _check2(statuscode, target):
1672 self.failUnlessEqual(statuscode, str(http.FOUND))
1673 self.failUnlessEqual(target, redir_url)
1674 d.addCallback(lambda res:
1675 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1678 t="check", repair="true",
1679 when_done=redir_url))
1680 d.addCallback(lambda res:
1681 self.POST(bar_url, t="check", return_to=redir_url))
1683 self.failUnless("Healthy :" in res)
1684 self.failUnless("Return to file" in res)
1685 self.failUnless(redir_url in res)
1686 d.addCallback(_check3)
# t=check on a healthy directory: HTML results, when_done= redirect,
# return_to= link, and JSON output form.
1689 def test_POST_DIRURL_check(self):
1690 foo_url = self.public_url + "/foo/"
1691 d = self.POST(foo_url, t="check")
1693 self.failUnless("Healthy :" in res, res)
1694 d.addCallback(_check)
1695 redir_url = "http://allmydata.org/TARGET"
1696 def _check2(statuscode, target):
1697 self.failUnlessEqual(statuscode, str(http.FOUND))
1698 self.failUnlessEqual(target, redir_url)
1699 d.addCallback(lambda res:
1700 self.shouldRedirect2("test_POST_DIRURL_check",
1704 when_done=redir_url))
1705 d.addCallback(lambda res:
1706 self.POST(foo_url, t="check", return_to=redir_url))
1708 self.failUnless("Healthy :" in res, res)
1709 self.failUnless("Return to file/directory" in res)
1710 self.failUnless(redir_url in res)
1711 d.addCallback(_check3)
1713 d.addCallback(lambda res:
1714 self.POST(foo_url, t="check", output="JSON"))
1715 def _check_json(res):
1716 data = simplejson.loads(res)
1717 self.failUnless("storage-index" in data)
1718 self.failUnless(data["results"]["healthy"])
1719 d.addCallback(_check_json)
# t=check&repair=true on a healthy directory: same checks as the file case.
1723 def test_POST_DIRURL_check_and_repair(self):
1724 foo_url = self.public_url + "/foo/"
1725 d = self.POST(foo_url, t="check", repair="true")
1727 self.failUnless("Healthy :" in res, res)
1728 d.addCallback(_check)
1729 redir_url = "http://allmydata.org/TARGET"
1730 def _check2(statuscode, target):
1731 self.failUnlessEqual(statuscode, str(http.FOUND))
1732 self.failUnlessEqual(target, redir_url)
1733 d.addCallback(lambda res:
1734 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1737 t="check", repair="true",
1738 when_done=redir_url))
1739 d.addCallback(lambda res:
1740 self.POST(foo_url, t="check", return_to=redir_url))
1742 self.failUnless("Healthy :" in res)
1743 self.failUnless("Return to file/directory" in res)
1744 self.failUnless(redir_url in res)
1745 d.addCallback(_check3)
# Poll /operations/<ophandle>?t=status&output=JSON once per second until
# the operation reports finished, then (in elided code) return the parsed
# JSON status. NOTE(review): the GET and return statements are elided.
1748 def wait_for_operation(self, ignored, ophandle):
1749 url = "/operations/" + ophandle
1750 url += "?t=status&output=JSON"
1753 data = simplejson.loads(res)
1754 if not data["finished"]:
1755 d = self.stall(delay=1.0)
1756 d.addCallback(self.wait_for_operation, ophandle)
# Fetch /operations/<ophandle> results; if output == "json" (any case),
# return the parsed JSON, otherwise the raw body (return path is elided).
1762 def get_operation_results(self, ignored, ophandle, output=None):
1763 url = "/operations/" + ophandle
1766 url += "&output=" + output
1769 if output and output.lower() == "json":
1770 return simplejson.loads(res)
# t=start-deep-check without ophandle= must be rejected as a slow operation.
1775 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1776 d = self.shouldFail2(error.Error,
1777 "test_POST_DIRURL_deepcheck_no_ophandle",
1779 "slow operation requires ophandle=",
1780 self.POST, self.public_url, t="start-deep-check")
# Deep-check via t=start-deep-check&ophandle=123: verify the redirect to the
# operations page, the JSON and HTML result forms, the 404 for an unknown
# per-object SI page, and the per-storage-index JSON detail page.
1783 def test_POST_DIRURL_deepcheck(self):
1784 def _check_redirect(statuscode, target):
1785 self.failUnlessEqual(statuscode, str(http.FOUND))
1786 self.failUnless(target.endswith("/operations/123"))
1787 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1788 self.POST, self.public_url,
1789 t="start-deep-check", ophandle="123")
1790 d.addCallback(self.wait_for_operation, "123")
1791 def _check_json(data):
1792 self.failUnlessEqual(data["finished"], True)
1793 self.failUnlessEqual(data["count-objects-checked"], 8)
1794 self.failUnlessEqual(data["count-objects-healthy"], 8)
1795 d.addCallback(_check_json)
1796 d.addCallback(self.get_operation_results, "123", "html")
1797 def _check_html(res):
1798 self.failUnless("Objects Checked: <span>8</span>" in res)
1799 self.failUnless("Objects Healthy: <span>8</span>" in res)
1800 d.addCallback(_check_html)
1802 d.addCallback(lambda res:
1803 self.GET("/operations/123/"))
1804 d.addCallback(_check_html) # should be the same as without the slash
1806 d.addCallback(lambda res:
1807 self.shouldFail2(error.Error, "one", "404 Not Found",
1808 "No detailed results for SI bogus",
1809 self.GET, "/operations/123/bogus"))
1811 foo_si = self._foo_node.get_storage_index()
1812 foo_si_s = base32.b2a(foo_si)
1813 d.addCallback(lambda res:
1814 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1815 def _check_foo_json(res):
1816 data = simplejson.loads(res)
1817 self.failUnlessEqual(data["storage-index"], foo_si_s)
1818 self.failUnless(data["results"]["healthy"])
1819 d.addCallback(_check_foo_json)
# Deep-check-and-repair on an all-healthy tree: every pre/post-repair
# counter must be consistent with "nothing needed repair", in both the
# JSON and HTML result forms.
1822 def test_POST_DIRURL_deepcheck_and_repair(self):
1823 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1824 ophandle="124", output="json", followRedirect=True)
1825 d.addCallback(self.wait_for_operation, "124")
1826 def _check_json(data):
1827 self.failUnlessEqual(data["finished"], True)
1828 self.failUnlessEqual(data["count-objects-checked"], 8)
1829 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1830 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1831 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1832 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1833 self.failUnlessEqual(data["count-repairs-successful"], 0)
1834 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1835 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1836 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1837 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1838 d.addCallback(_check_json)
1839 d.addCallback(self.get_operation_results, "124", "html")
1840 def _check_html(res):
1841 self.failUnless("Objects Checked: <span>8</span>" in res)
1843 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1844 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1845 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1847 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1848 self.failUnless("Repairs Successful: <span>0</span>" in res)
1849 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1851 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1852 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1853 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1854 d.addCallback(_check_html)
# POST to a file URL with an unknown t= must fail with 400 Bad Request.
# NOTE(review): the trailing t="bogus" argument line is elided here.
1857 def test_POST_FILEURL_bad_t(self):
1858 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1859 "POST to file: bad t=bogus",
1860 self.POST, self.public_url + "/foo/bar.txt",
def test_POST_mkdir(self): # return value?
    # POST t=mkdir with name= creates an empty child directory under /foo.
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    def _get_new(ign):
        return self._foo_node.get(u"newdir")
    d.addCallback(_get_new)
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_POST_mkdir_2(self):
    # POST .../newdir?t=mkdir (name in the URL) also creates an empty dir.
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    def _created(ign):
        return self.failUnlessNodeHasChild(self._foo_node, u"newdir")
    d.addCallback(_created)
    d.addCallback(lambda ign: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_POST_mkdirs_2(self):
    # A deep POST t=mkdir creates the intermediate 'bardir' and then 'newdir'.
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    def _intermediate(ign):
        return self.failUnlessNodeHasChild(self._foo_node, u"bardir")
    d.addCallback(_intermediate)
    d.addCallback(lambda ign: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_POST_mkdir_no_parentdir_noredirect(self):
    # POST /uri?t=mkdir returns a parseable directory writecap in the body.
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # raises if the response is not a valid DIR2 URI string
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
def test_POST_mkdir_no_parentdir_redirect(self):
    # with redirect_to_result=true we get a 303 redirect to uri/URI:DIR2:...
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
def test_POST_noparent_bad(self):
    # an unknown t= on /uri must be rejected with a 400 and a helpful message
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
def test_welcome_page_mkdir_button(self):
    """Scrape the mkdir form from the welcome page, submit it, and expect
    a 303 redirect (same as t=mkdir&redirect_to_result=true)."""
    # Fetch the welcome page.
    # NOTE(review): this assignment and 'formt = mo.group(2)' below were
    # dropped from the excerpt; restored, since both names are used later.
    d = self.GET("/")
    def _after_get_welcome_page(res):
        MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
        mo = MKDIR_BUTTON_RE.search(res)
        formaction = mo.group(1)
        formt = mo.group(2)
        formaname = mo.group(3)
        formavalue = mo.group(4)
        return (formaction, formt, formaname, formavalue)
    d.addCallback(_after_get_welcome_page)
    def _after_parse_form(res):
        (formaction, formt, formaname, formavalue) = res
        return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
    d.addCallback(_after_parse_form)
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    return d
def test_POST_mkdir_replace(self): # return value?
    # t=mkdir over an existing child "sub" replaces it with an empty dir
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_POST_mkdir_no_replace_queryarg(self): # return value?
    # ?replace=false must refuse to clobber the existing "sub" child
    d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
    d.addBoth(self.shouldFail, error.Error,
              "POST_mkdir_no_replace_queryarg",
              # NOTE(review): a status-code argument line is missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    # the original child must be untouched
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
def test_POST_mkdir_no_replace_field(self): # return value?
    # same as the queryarg variant, but replace=false as a form field
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
    # NOTE(review): the 'replace=...' argument line is missing from this excerpt
    d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
def test_POST_mkdir_whendone_field(self):
    # when_done= as a form field redirects to that URL after the mkdir
    d = self.POST(self.public_url + "/foo",
                  t="mkdir", name="newdir", when_done="/THERE")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_POST_mkdir_whendone_queryarg(self):
    # when_done= as a query arg behaves the same as the form-field variant
    d = self.POST(self.public_url + "/foo?when_done=/THERE",
                  t="mkdir", name="newdir")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_POST_bad_t(self):
    # an unknown t= on a directory must be rejected with 400 Bad Request
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to a directory with bad t=BOGUS",
                         self.POST, self.public_url + "/foo", t="BOGUS")
def test_POST_set_children(self):
    # exercise ?t=set_children: atomically add three children by URI
    contents9, n9, newuri9 = self.makefile(9)
    contents10, n10, newuri10 = self.makefile(10)
    contents11, n11, newuri11 = self.makefile(11)
    # NOTE(review): the opening of the JSON request body
    # (reqbody = """{ ... with per-child metadata) is missing from this
    # excerpt; the fragment below is its visible remainder.
    "atomic_added_1": [ "filenode", { "rw_uri": "%s",
    "ctime": 1002777696.7564139,
    "mtime": 1002777696.7564139
    "atomic_added_2": [ "filenode", { "rw_uri": "%s",
    "ctime": 1002777696.7564139,
    "mtime": 1002777696.7564139
    "atomic_added_3": [ "filenode", { "rw_uri": "%s",
    "ctime": 1002777696.7564139,
    "mtime": 1002777696.7564139
    }""" % (newuri9, newuri10, newuri11)
    url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
    d = client.getPage(url, method="POST", postdata=reqbody)
    # NOTE(review): 'def _then(res):' header missing from this excerpt
    self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
    self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
    self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
    d.addCallback(_then)
    d.addErrback(self.dump_error)
def test_POST_put_uri(self):
    # t=uri attaches an existing filecap under a new child name
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
    # NOTE(review): remainder of this call missing from this excerpt
def test_POST_put_uri_replace(self):
    # t=uri over an existing child name replaces it by default
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
    # NOTE(review): remainder of this call missing from this excerpt
def test_POST_put_uri_no_replace_queryarg(self):
    # ?replace=false must refuse to clobber bar.txt with the new URI
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo?replace=false", t="uri",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_put_uri_no_replace_queryarg",
              # NOTE(review): a status-code argument line is missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    # original bar.txt must survive
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_put_uri_no_replace_field(self):
    # same refusal, with replace=false passed as a form field
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", replace="false",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_put_uri_no_replace_field",
              # NOTE(review): a status-code argument line is missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_delete(self):
    # t=delete removes the named child from the directory
    d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
    d.addCallback(lambda res: self._foo_node.list())
    def _check(children):
        self.failIf(u"bar.txt" in children)
    d.addCallback(_check)
def test_POST_rename_file(self):
    # t=rename moves bar.txt to wibble.txt; contents are unchanged
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='wibble.txt')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
def test_POST_rename_file_redundant(self):
    # renaming a file to its own name is a no-op, not an error
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='bar.txt')
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
def test_POST_rename_file_replace(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"empty"))
    # "empty" now serves the file contents that used to be bar.txt
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
def test_POST_rename_file_no_replace_queryarg(self):
    # rename a file and replace a directory with it
    # (?replace=false must refuse; the "empty" directory survives)
    d = self.POST(self.public_url + "/foo?replace=false", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_queryarg",
              # NOTE(review): a status-code argument line is missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
def test_POST_rename_file_no_replace_field(self):
    # rename a file and replace a directory with it
    # (replace=false as a form field must refuse)
    d = self.POST(self.public_url + "/foo", t="rename", replace="false",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_field",
              # NOTE(review): a status-code argument line is missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json rendering of an empty directory."""
    data = simplejson.loads(res)
    # a dirnode renders as ["dirnode", {..."children": {...}...}]
    self.failUnlessEqual(data[0], "dirnode", data)
    children = data[1]["children"]
    self.failUnlessEqual(len(children), 0)
def test_POST_rename_file_slash_fail(self):
    # a slash in to_name= must be rejected and leave bar.txt in place
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='kirk/spock.txt')
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_rename_file_slash_fail",
              # NOTE(review): a status-code argument line is missing here
              "to_name= may not contain a slash",
    # NOTE(review): remainder of this call missing from this excerpt
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
def test_POST_rename_dir(self):
    # t=rename works on directories too: foo -> plunk at the root
    d = self.POST(self.public_url, t="rename",
                  from_name="foo", to_name='plunk')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"plunk"))
    d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
    d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """Assert that *res* is a PageRedirect failure and return its Location.

    If target is not None, the redirect must point at webish_url+target.
    If statuscode is not None, the redirect must use that HTTP status.
    """
    if not isinstance(res, failure.Failure):
        # a real page came back instead of a redirect: fail loudly
        if target is None:
            to_where = "somewhere"
        else:
            to_where = "to " + target
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        self.failUnlessEqual(res.value.location, self.webish_url + target,
                             "%s: wrong target" % which)
    return res.value.location
def test_GET_URI_form(self):
    base = "/uri?uri=%s" % self._bar_txt_uri
    # this is supposed to give us a redirect to /uri/$URI, plus arguments
    targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
    # NOTE(review): the initial GET that creates 'd' is missing from this excerpt
    d.addBoth(self.shouldRedirect, targetbase)
    d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
    d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
    d.addCallback(lambda res: self.GET(base+"&t=json"))
    d.addBoth(self.shouldRedirect, targetbase+"?t=json")
    d.addCallback(self.log, "about to get file by uri")
    # following the redirect must produce the actual file / json
    d.addCallback(lambda res: self.GET(base, followRedirect=True))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(self.log, "got file by uri, about to get dir by uri")
    d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
                                       followRedirect=True))
    d.addCallback(self.failUnlessIsFooJSON)
    d.addCallback(self.log, "got dir by uri")
def test_GET_URI_form_bad(self):
    # /uri without a uri= query arg must be rejected
    d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                         "400 Bad Request", "GET /uri requires uri=",
    # NOTE(review): remainder of this call missing from this excerpt
def test_GET_rename_form(self):
    # t=rename-form serves an HTML form pre-filled with from_name
    d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
                 followRedirect=True)
    # NOTE(review): 'def _check(res):' header missing from this excerpt
    self.failUnless('name="when_done" value="."' in res, res)
    self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
    d.addCallback(_check)
def log(self, res, msg):
    # pass-through logging helper for Deferred chains (debug aid)
    #print "MSG: %s RES: %s" % (msg, res)
    # NOTE(review): the log call and 'return res' are missing from this excerpt
def test_GET_URI_URL(self):
    # /uri/$FILECAP serves the file; filename=/save= args don't change that
    base = "/uri/%s" % self._bar_txt_uri
    # NOTE(review): the initial GET that creates 'd' is missing from this excerpt
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_GET_URI_URL_dir(self):
    # /uri/$DIRCAP?t=json serves the directory's JSON rendering
    base = "/uri/%s?t=json" % self._foo_uri
    # NOTE(review): the GET that creates 'd' is missing from this excerpt
    d.addCallback(self.failUnlessIsFooJSON)
def test_GET_URI_URL_missing(self):
    # fetching a cap with no recoverable shares must yield http.GONE
    base = "/uri/%s" % self._bad_file_uri
    d = self.shouldHTTPError("test_GET_URI_URL_missing",
                             http.GONE, None, "NotEnoughSharesError",
    # NOTE(review): remainder of this call missing from this excerpt
    # TODO: how can we exercise both sides of WebDownloadTarget.fail
    # here? we must arrange for a download to fail after target.open()
    # has been called, and then inspect the response to see that it is
    # shorter than we expected.
def test_PUT_DIRURL_uri(self):
    # PUT ?t=uri on a dir URL replaces that child with the given dircap
    d = self.s.create_dirnode()
    # NOTE(review): 'def _made_dir(dn):' header missing from this excerpt
        new_uri = dn.get_uri()
        # replace /foo with a new (empty) directory
        d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
        d.addCallback(lambda res:
                      self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildURIIs(self.public_root,
        # NOTE(review): remainder of this call and 'return d' missing
    d.addCallback(_made_dir)
def test_PUT_DIRURL_uri_noreplace(self):
    d = self.s.create_dirnode()
    # NOTE(review): 'def _made_dir(dn):' header missing from this excerpt
        new_uri = dn.get_uri()
        # replace /foo with a new (empty) directory, but ask that
        # replace=false, so it should fail
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
                             "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
                             self.public_url + "/foo?t=uri&replace=false",
        # NOTE(review): remainder of this call missing from this excerpt
        d.addCallback(lambda res:
                      self.failUnlessChildURIIs(self.public_root,
        # NOTE(review): remainder of this call and 'return d' missing
    d.addCallback(_made_dir)
def test_PUT_DIRURL_bad_t(self):
    # PUT with an unknown t= on a directory must be a 400, leaving foo intact
    d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                         "400 Bad Request", "PUT to a directory",
                         self.PUT, self.public_url + "/foo?t=BOGUS", "")
    d.addCallback(lambda res:
                  self.failUnlessChildURIIs(self.public_root,
    # NOTE(review): remainder of this call missing from this excerpt
def test_PUT_NEWFILEURL_uri(self):
    # PUT ?t=uri attaches an existing filecap at a new child name
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
    d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
    # NOTE(review): remainder of this call missing from this excerpt
def test_PUT_NEWFILEURL_uri_replace(self):
    # PUT ?t=uri over an existing child replaces it by default
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
    d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
    # NOTE(review): remainder of this call missing from this excerpt
def test_PUT_NEWFILEURL_uri_no_replace(self):
    # with replace=false the PUT must refuse to clobber bar.txt
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
              # NOTE(review): a status-code argument line is missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
def test_PUT_NEWFILE_URI(self):
    # PUT /uri uploads the body and returns the new filecap
    file_contents = "New file contents here\n"
    d = self.PUT("/uri", file_contents)
    # NOTE(review): 'def _check(uri):' header missing from this excerpt
        assert isinstance(uri, str), uri
        self.failUnless(uri in FakeCHKFileNode.all_contents)
        self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
        # NOTE(review): remainder of this call missing from this excerpt
        return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    # NOTE(review): 'def _check2(res):' header missing from this excerpt
        self.failUnlessEqual(res, file_contents)
    d.addCallback(_check2)
def test_PUT_NEWFILE_URI_not_mutable(self):
    # ?mutable=false behaves like the plain immutable upload
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=false", file_contents)
    # NOTE(review): 'def _check(uri):' header missing from this excerpt
        assert isinstance(uri, str), uri
        self.failUnless(uri in FakeCHKFileNode.all_contents)
        self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
        # NOTE(review): remainder of this call missing from this excerpt
        return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    # NOTE(review): 'def _check2(res):' header missing from this excerpt
        self.failUnlessEqual(res, file_contents)
    d.addCallback(_check2)
def test_PUT_NEWFILE_URI_only_PUT(self):
    # an unknown t= on PUT /uri must be rejected with the usage message
    d = self.PUT("/uri?t=bogus", "")
    d.addBoth(self.shouldFail, error.Error,
              "PUT_NEWFILE_URI_only_PUT",
              # NOTE(review): a status-code argument line is missing here
              "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
def test_PUT_NEWFILE_URI_mutable(self):
    # ?mutable=true creates an SSK mutable file and returns its writecap
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=true", file_contents)
    def _check1(filecap):
        filecap = filecap.strip()
        self.failUnless(filecap.startswith("URI:SSK:"), filecap)
        self.filecap = filecap
        u = uri.WriteableSSKFileURI.init_from_string(filecap)
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(filecap)
        return n.download_best_version()
    d.addCallback(_check1)
    # NOTE(review): 'def _check2(data):' header missing from this excerpt
        self.failUnlessEqual(data, file_contents)
        return self.GET("/uri/%s" % urllib.quote(self.filecap))
    d.addCallback(_check2)
    # NOTE(review): 'def _check3(res):' header missing from this excerpt
        self.failUnlessEqual(res, file_contents)
    d.addCallback(_check3)
def test_PUT_mkdir(self):
    # PUT /uri?t=mkdir returns a dircap for a new empty directory
    d = self.PUT("/uri?t=mkdir", "")
    # NOTE(review): 'def _check(uri):' header missing from this excerpt
        n = self.s.create_node_from_uri(uri.strip())
        d2 = self.failUnlessNodeKeysAre(n, [])
        d2.addCallback(lambda res:
                       self.GET("/uri/%s?t=json" % uri))
        # NOTE(review): 'return d2' missing from this excerpt
    d.addCallback(_check)
    d.addCallback(self.failUnlessIsEmptyJSON)
def test_POST_check(self):
    # t=check on a child triggers a file check
    d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
    # NOTE(review): 'def _done(res):' header missing from this excerpt
        # this returns a string form of the results, which are probably
        # None since we're using fake filenodes.
        # TODO: verify that the check actually happened, by changing
        # FakeCHKFileNode to count how many times .check() has been
        # called.
    d.addCallback(_done)
def test_bad_method(self):
    # an unsupported HTTP method must yield 501 Not Implemented
    url = self.webish_url + self.public_url + "/foo/bar.txt"
    d = self.shouldHTTPError("test_bad_method",
                             501, "Not Implemented",
                             "I don't know how to treat a BOGUS request.",
                             client.getPage, url, method="BOGUS")
def test_short_url(self):
    # DELETE on the bare /uri resource is not supported
    url = self.webish_url + "/uri"
    d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                             "I don't know how to treat a DELETE request.",
                             client.getPage, url, method="DELETE")
def test_ophandle_bad(self):
    # querying a never-created operation handle must 404
    url = self.webish_url + "/operations/bogus?t=status"
    d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                             "unknown/expired handle 'bogus'",
                             client.getPage, url)
def test_ophandle_cancel(self):
    # start a long-running manifest operation, then cancel it via t=cancel
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/128?t=status&output=JSON"))
    # NOTE(review): 'def _check1(res):' header missing from this excerpt
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
        monitor = self.ws.root.child_operations.handles["128"][0]
        d = self.POST("/operations/128?t=cancel&output=JSON")
        # NOTE(review): 'def _check2(res):' header missing from this excerpt
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            # t=cancel causes the handle to be forgotten
            self.failUnless(monitor.is_cancelled())
        d.addCallback(_check2)
        # NOTE(review): 'return d' missing from this excerpt
    d.addCallback(_check1)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_cancel",
                                       404, "404 Not Found",
                                       "unknown/expired handle '128'",
                  # NOTE(review): the callable argument line is missing here
                                       "/operations/128?t=status&output=JSON"))
def test_ophandle_retainfor(self):
    # retain-for=0 on a status query expires the handle almost immediately
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
    # NOTE(review): 'def _check1(res):' header missing from this excerpt
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # the retain-for=0 will cause the handle to be expired very soon
    d.addCallback(self.stall, 2.0)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_retainfor",
                                       404, "404 Not Found",
                                       "unknown/expired handle '129'",
                  # NOTE(review): the callable argument line is missing here
                                       "/operations/129?t=status&output=JSON"))
def test_ophandle_release_after_complete(self):
    # release-after-complete=true drops the handle once the op finishes
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "130")
    d.addCallback(lambda ignored:
                  self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
    # the release-after-complete=true will cause the handle to be expired
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_release_after_complete",
                                       404, "404 Not Found",
                                       "unknown/expired handle '130'",
                  # NOTE(review): the callable argument line is missing here
                                       "/operations/130?t=status&output=JSON"))
def test_incident(self):
    # POST /report_incident acknowledges the report in its response page
    d = self.POST("/report_incident", details="eek")
    # NOTE(review): 'def _done(res):' header missing from this excerpt
        self.failUnless("Thank you for your report!" in res, res)
    d.addCallback(_done)
def test_static(self):
    # files placed under the static dir are served at /static/...
    webdir = os.path.join(self.staticdir, "subdir")
    fileutil.make_dirs(webdir)
    f = open(os.path.join(webdir, "hello.txt"), "wb")
    # NOTE(review): the write/close of hello.txt is missing from this excerpt
    d = self.GET("/static/subdir/hello.txt")
    # NOTE(review): 'def _check(res):' header missing from this excerpt
        self.failUnlessEqual(res, "hello")
    d.addCallback(_check)
class Util(unittest.TestCase, ShouldFailMixin):
    """Unit tests for the formatting helpers in allmydata.web.common/status."""

    def test_parse_replace_arg(self):
        # "true"/"false" map to booleans; anything else is rejected
        self.failUnlessEqual(common.parse_replace_arg("true"), True)
        self.failUnlessEqual(common.parse_replace_arg("false"), False)
        self.failUnlessEqual(common.parse_replace_arg("only-files"),
        # NOTE(review): continuation of this call missing from this excerpt
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        # NOTE(review): 'def convert(s):' header missing from this excerpt
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
        # NOTE(review): 'def convert2(s):' header missing from this excerpt
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2552 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """Issue a POST to the stashed fileurl for *which*, appending *args*
    as the query string. *ign* is the previous callback result, ignored."""
    checkurl = self.fileurls[which] + "?" + args
    return self.GET(checkurl, method="POST", clientnum=clientnum)
def test_filecheck(self):
    self.basedir = "web/Grid/filecheck"
    # NOTE(review): grid setup and the DATA/self.uris initialization lines
    # are missing from this excerpt.
    c0 = self.g.clients[0]
    # upload four immutable files and one mutable, stashing each URI:
    #   good  - untouched
    #   sick  - one share deleted
    #   dead  - nine shares deleted (unrecoverable)
    #   corrupt - one share corrupted via the debug tool
    #   small - a literal file
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data("literal", convergence="")))
    d.addCallback(_stash_uri, "small")

    def _compute_fileurls(ignored):
        # NOTE(review): 'self.fileurls = {}' missing from this excerpt
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        # NOTE(review): the 'corrupt_share(cso)' call is missing from this excerpt
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)
    d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
    def _got_html_good_return_to(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
        # NOTE(review): continuation of this assertion missing from this excerpt
    d.addCallback(_got_html_good_return_to)
    d.addCallback(self.CHECK, "good", "t=check&output=json")
    def _got_json_good(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"], "Healthy")
        self.failUnless(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_good)

    d.addCallback(self.CHECK, "small", "t=check")
    def _got_html_small(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_small)
    d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
    def _got_html_small_return_to(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
        # NOTE(review): continuation of this assertion missing from this excerpt
    d.addCallback(_got_html_small_return_to)
    d.addCallback(self.CHECK, "small", "t=check&output=json")
    def _got_json_small(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_small)

    d.addCallback(self.CHECK, "sick", "t=check")
    def _got_html_sick(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_sick)
    d.addCallback(self.CHECK, "sick", "t=check&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_sick)

    d.addCallback(self.CHECK, "dead", "t=check")
    def _got_html_dead(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_dead)
    d.addCallback(self.CHECK, "dead", "t=check&output=json")
    def _got_json_dead(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 1 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failIf(r["results"]["recoverable"])
    d.addCallback(_got_json_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
    def _got_html_corrupt(res):
        self.failUnless("Not Healthy! : Unhealthy" in res, res)
    d.addCallback(_got_html_corrupt)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
    def _got_json_corrupt(res):
        r = simplejson.loads(res)
        self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
        # NOTE(review): continuation of this assertion missing from this excerpt
        self.failIf(r["results"]["healthy"])
        self.failUnless(r["results"]["recoverable"])
        self.failUnlessEqual(r["results"]["count-shares-good"], 9)
        self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
    d.addCallback(_got_json_corrupt)

    d.addErrback(self.explain_web_error)
    # NOTE(review): trailing 'return d' not visible in this excerpt
def test_repair_html(self):
    self.basedir = "web/Grid/repair_html"
    # NOTE(review): grid setup and DATA/self.uris initialization lines are
    # missing from this excerpt.
    c0 = self.g.clients[0]
    # upload good/sick/dead immutable files plus a mutable to corrupt
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")

    def _compute_fileurls(ignored):
        # NOTE(review): 'self.fileurls = {}' missing from this excerpt
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        # NOTE(review): the 'corrupt_share(cso)' call is missing from this excerpt
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check&repair=true")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("No repair necessary" in res, res)
    d.addCallback(_got_html_good)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true")
    def _got_html_sick(res):
        self.failUnless("Healthy : healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_sick)

    # repair of a dead file will fail, of course, but it isn't yet
    # clear how this should be reported. Right now it shows up as
    #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
    #def _got_html_dead(res):
    #    self.failUnless("Healthy : healthy" in res, res)
    #    self.failIf("Not Healthy" in res, res)
    #    self.failUnless("No repair necessary" in res, res)
    #d.addCallback(_got_html_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
    def _got_html_corrupt(res):
        self.failUnless("Healthy : Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_corrupt)

    d.addErrback(self.explain_web_error)
    # NOTE(review): trailing 'return d' not visible in this excerpt
def test_repair_json(self):
    self.basedir = "web/Grid/repair_json"
    # NOTE(review): grid setup and DATA/self.uris initialization lines are
    # missing from this excerpt.
    c0 = self.g.clients[0]
    # upload one file, delete a share, then check+repair with JSON output
    d = c0.upload(upload.Data(DATA+"1", convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "sick")

    def _compute_fileurls(ignored):
        # NOTE(review): 'self.fileurls = {}' missing from this excerpt
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["repair-attempted"], True)
        self.failUnlessEqual(r["repair-successful"], True)
        self.failUnlessEqual(r["pre-repair-results"]["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["pre-repair-results"]["results"]["healthy"])
        self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
        self.failUnless(r["post-repair-results"]["results"]["healthy"])
    d.addCallback(_got_json_sick)

    d.addErrback(self.explain_web_error)
    # NOTE(review): trailing 'return d' not visible in this excerpt
    def test_unknown(self):
        """The webapi must tolerate caps in formats it does not understand:
        directory listings, t=json, and t=info must all render an
        UnknownNode child without failing."""
        self.basedir = "web/Grid/unknown"
        c0 = self.g.clients[0]
        # caps in an imaginary future format
        future_writecap = "x-tahoe-crazy://I_am_from_the_future."
        future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
        # the future cap format may contain slashes, which must be tolerated
        expected_info_url = "uri/%s?t=info" % urllib.quote(future_writecap,
        future_node = UnknownNode(future_writecap, future_readcap)
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            # keep both writeable and read-only URLs for the root directory
            self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
            return self.rootnode.set_node(u"future", future_node)
        d.addCallback(_stash_root_and_create_file)
        # make sure directory listing tolerates unknown nodes
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_html(res):
            self.failUnlessIn("<td>future</td>", res)
            # find the More Info link for "future", should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnlessEqual(info_url, "future?t=info")
        d.addCallback(_check_html)
        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_json(res, expect_writecap):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            f = data[1]["children"]["future"]
            self.failUnlessEqual(f[0], "unknown")
            # NOTE(review): expect_writecap is otherwise unused here;
            # presumably the next two assertions were meant to be guarded
            # by 'if expect_writecap: ... else: ...' -- confirm.
            self.failUnlessEqual(f[1]["rw_uri"], future_writecap)
            self.failIfIn("rw_uri", f[1])
            self.failUnlessEqual(f[1]["ro_uri"], future_readcap)
            self.failUnless("metadata" in f[1])
        d.addCallback(_check_json, expect_writecap=True)
        d.addCallback(lambda ign: self.GET(expected_info_url))
        def _check_info(res, expect_readcap):
            self.failUnlessIn("Object Type: <span>unknown</span>", res)
            self.failUnlessIn(future_writecap, res)
            # NOTE(review): expect_readcap is otherwise unused here;
            # presumably this assertion was meant to be guarded by
            # 'if expect_readcap:' -- confirm.
            self.failUnlessIn(future_readcap, res)
            self.failIfIn("Raw data as", res)
            self.failIfIn("Directory writecap", res)
            self.failIfIn("Checker Operations", res)
            self.failIfIn("Mutable File Operations", res)
            self.failIfIn("Directory Operations", res)
        d.addCallback(_check_info, expect_readcap=False)
        d.addCallback(lambda ign: self.GET(self.rooturl+"future?t=info"))
        d.addCallback(_check_info, expect_readcap=True)
        # and make sure that a read-only version of the directory can be
        # rendered too. This version will not have future_writecap
        d.addCallback(lambda ign: self.GET(self.rourl))
        d.addCallback(_check_html)
        d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
        d.addCallback(_check_json, expect_writecap=False)
    def test_deep_check(self):
        """Build a small tree (good/small/sick/future), damage 'sick',
        and verify the 't=stream-deep-check' and 't=stream-manifest'
        streaming-JSON output, including tolerance of unknown caps and
        ERROR: reporting once a subdirectory becomes unrecoverable."""
        self.basedir = "web/Grid/deep_check"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            # remember each child's URI under a symbolic name
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        # this tests that deep-check and stream-manifest will ignore
        # UnknownNode instances. Hopefully this will also cover deep-stats.
        future_writecap = "x-tahoe-crazy://I_am_from_the_future."
        future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
        future_node = UnknownNode(future_writecap, future_readcap)
        d.addCallback(lambda ign: self.rootnode.set_node(u"future",future_node))
        def _clobber_shares(ignored):
            # drop two shares of "sick": still recoverable, but not healthy
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        # NOTE(review): the callback 'def' line and its try/except framing
        # appear to be missing here; the lines below read like the body of
        # a '_done(res)' response handler -- confirm against upstream.
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            print "response is:", res
            print "undecodeable line was '%s'" % line
            # 5 children plus the trailing stats unit
            self.failUnlessEqual(len(units), 5+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
            self.failUnlessEqual(stats["type"], "stats")
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
            self.failUnlessEqual(s["count-unknown"], 1)
        d.addCallback(_done)
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_manifest(res):
            # stream-manifest emits one JSON line per node plus stats
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 5+1)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessEqual(stats["count-immutable-files"], 2)
            self.failUnlessEqual(stats["count-literal-files"], 1)
            self.failUnlessEqual(stats["count-mutable-files"], 0)
            self.failUnlessEqual(stats["count-immutable-files"], 2)
            self.failUnlessEqual(stats["count-unknown"], 1)
        d.addCallback(_check_manifest)
        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens
        d.addCallback(lambda ign:
                      self.rootnode.create_empty_directory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+"2",
        d.addCallback(_stash_uri, "grandchild")
        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],
        # root/subdir [unrecoverable]
        # root/subdir/grandchild
        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            lines = res.splitlines()
            for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            # everything before the ERROR: line must still be valid JSON
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            lines = res.splitlines()
            for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            self.failUnlessEqual(r["count-recoverable-versions"], 0)
            self.failUnlessEqual(r["count-shares-good"], 1)
            self.failUnlessEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)
        d.addErrback(self.explain_web_error)
    def test_deep_check_and_repair(self):
        """Build good/small/sick children, delete one share of 'sick',
        then verify 't=stream-deep-check&repair=true' repairs only the
        sick file and reports per-node check-and-repair results."""
        self.basedir = "web/Grid/deep_check_and_repair"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            # remember each child's URI under a symbolic name
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        #d.addCallback(lambda ign:
        #              self.rootnode.add_file(u"dead",
        #                                     upload.Data(DATA+"2",
        #d.addCallback(_stash_uri, "dead")
        #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
        #d.addCallback(_stash_uri, "corrupt")
        def _clobber_shares(ignored):
            # sanity-check the healthy file, then delete one share of "sick"
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            #dead_shares = self.find_shares(self.uris["dead"])
            #for i in range(1, 10):
            #    os.unlink(dead_shares[i][2])
            #c_shares = self.find_shares(self.uris["corrupt"])
            #cso = CorruptShareOptions()
            #cso.stdout = StringIO()
            #cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)
        # root/good CHK, 10 shares
        # root/sick CHK, 9 shares
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
        # NOTE(review): the 'def _done(res):' line appears to be missing
        # here; the lines below read like its body -- confirm.
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # 4 children plus the trailing stats unit
            self.failUnlessEqual(len(units), 4+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0crr = u0["check-and-repair-results"]
            self.failUnlessEqual(u0crr["repair-attempted"], False)
            self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcrr = ugood["check-and-repair-results"]
            # NOTE(review): ugoodcrr is computed but never used; the next
            # two assertions re-check u0crr -- presumably they were meant
            # to use ugoodcrr. Confirm and fix upstream.
            self.failUnlessEqual(u0crr["repair-attempted"], False)
            self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
            usick = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"sick"]][0]
            self.failUnlessEqual(usick["cap"], self.uris["sick"])
            usickcrr = usick["check-and-repair-results"]
            self.failUnlessEqual(usickcrr["repair-attempted"], True)
            self.failUnlessEqual(usickcrr["repair-successful"], True)
            self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
            self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
            self.failUnlessEqual(stats["type"], "stats")
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
        d.addCallback(_done)
        d.addErrback(self.explain_web_error)
    def _count_leases(self, ignored, which):
        """Collect (share filename, lease count) pairs for every share of
        the stashed URI named `which`. The `ignored` argument lets this
        method sit directly in a Deferred callback chain."""
        u = self.uris[which]
        shares = self.find_shares(u)
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
3156 def _assert_leasecount(self, lease_counts, expected):
3157 for (fn, num_leases) in lease_counts:
3158 if num_leases != expected:
3159 self.fail("expected %d leases, have %d, on %s" %
3160 (expected, num_leases, fn))
    def test_add_lease(self):
        """Verify 't=check&add-lease=true' semantics: the original client
        merely renews its existing lease, while a second client's check
        adds a second lease to the checked object only."""
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
        d.addCallback(_stash_mutable_uri, "mutable")
        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        # all three objects start with exactly one lease
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        # a plain check must not change any lease counts
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # same-client check of the mutable file: renew only
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # alternate-client check of the mutable file adds a second lease
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)
        d.addErrback(self.explain_web_error)
    def test_deep_add_lease(self):
        """Like test_add_lease, but exercised through
        't=stream-deep-check&add-lease=true' over a small directory tree:
        only a different client's deep-check adds new leases."""
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"one", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")
        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
        # NOTE(review): the 'def _done(res):' line appears to be missing
        # here; the lines below read like its body -- confirm.
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # root, one, small, mutable, stats
            self.failUnlessEqual(len(units), 4+1)
        d.addCallback(_done)
        # everything starts with one lease
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)
        # same client: leases are renewed, not added
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
        d.addCallback(_done)
        # alternate client: each object gains a second lease
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)
        d.addErrback(self.explain_web_error)
    def test_exceptions(self):
        """Exercise webapi error reporting: NoSharesError,
        NotEnoughSharesError, missing children (404), unrecoverable
        directories (410), and internal server errors rendered as HTML
        or text/plain depending on the Accept header."""
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        # NOTE(review): the 'def _stash_root(n):' line appears to be
        # missing here -- confirm.
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
        # NOTE(review): the 'def _stash_bad(ur):' line appears to be
        # missing here -- confirm.
            # keep exactly one share for the "1share" case
            self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
            self.delete_shares_numbered(ur.uri, range(1,10))
            # flip a key bit to make a URI whose shares cannot exist
            u = uri.from_string(ur.uri)
            u.key = testutil.flip_bit(u.key, 0)
            baduri = u.to_string()
            self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_1share(n):
            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-1share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(1,10))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_0share(n):
            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-0share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(0,10))
        d.addCallback(_mangle_dirnode_0share)
        # NotEnoughSharesError should be reported sensibly, with a
        # text/plain explanation of the problem, and perhaps some
        # information on which shares *could* be found.
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET unrecoverable",
                                           410, "Gone", "NoSharesError",
                                           self.GET, self.fileurls["0shares"]))
        def _check_zero_shares(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NoSharesError: no shares could be found. "
                   "Zero shares usually indicates a corrupt URI, or that "
                   "no servers were connected, but it might also indicate "
                   "severe corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is: "
                   "Failed to get enough shareholders: have 0, need 3")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_zero_shares)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET 1share",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["1share"]))
        def _check_one_share(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NotEnoughSharesError: This indicates that some "
                   "servers were unavailable, or that shares have been "
                   "lost to server departure, hard drive failure, or disk "
                   "corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is:"
                   " Failed to get enough shareholders: have 1, need 3")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_one_share)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        def _missing_child(body):
            self.failUnless("No such child: imaginary" in body, body)
        d.addCallback(_missing_child)
        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            self.failUnless("<html>" in body, body)
            # we should see the regular page, but without the child table or
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_0shares_dir_html)
        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.failUnless("<html>" in body, body)
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_1shares_dir_html)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_unrecoverable_file)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        # attach a webapi child that throws a random error, to test how it
        w = c0.getServiceNamed("webish")
        w.root.putChild("ERRORBOOM", ErrorBoom())
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html(body):
            # test that a weird exception during a webapi operation with
            # Accept:*/* results in a text/html stack trace, while one
            # without that Accept: line gets us a text/plain stack trace
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["text/plain"]}))
        def _internal_error_text(body):
            # test that a weird exception during a webapi operation with
            # Accept:*/* results in a text/html stack trace, while one
            # without that Accept: line gets us a text/plain stack trace
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text)
        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            self.flushLoggedErrors(CompletelyUnhandledError)
        d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """An exception no webapi handler knows about; raised by ErrorBoom to
    provoke the generic 500 Internal Server Error path."""
class ErrorBoom(rend.Page):
    """A web resource that always fails, so tests can observe how the
    webapi renders an unexpected internal error."""
    def beforeRender(self, ctx):
        # blow up unconditionally before any rendering happens
        boom = CompletelyUnhandledError("whoops")
        raise boom