1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.nodemaker import NodeMaker
15 from allmydata.unknown import UnknownNode
16 from allmydata.web import status, common
17 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
18 from allmydata.util import fileutil, base32
19 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
20 create_chk_filenode, WebErrorMixin, ShouldFailMixin
21 from allmydata.interfaces import IMutableFileNode
22 from allmydata.mutable import servermap, publish, retrieve
23 import common_util as testutil
24 from allmydata.test.no_network import GridTestMixin
25 from allmydata.test.common_web import HTTPClientGETFactory, \
28 # create a fake uploader/downloader, and a couple of fake dirnodes, then
29 # create a webserver that works against them
# trial reads this module-level 'timeout' to bound each test case's runtime.
31 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
# Minimal stand-in for the node's stats provider: exposes a 'stats' dict
# with empty 'stats' and 'counters' sub-dicts, which is all the web status
# pages appear to need here.
# NOTE(review): the listing skips original line 34 (numbering jumps 33->35),
# so part of this class body is not visible.
33 class FakeStatsProvider:
35 stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker double: hands back fake file nodes instead of real ones.

    LIT and immutable caps both become FakeCHKFileNode instances; mutable
    caps become FakeMutableFileNode instances.
    """
    def _create_lit(self, cap):
        # LIT caps are served by the same fake CHK node type.
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_uri(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for interface compatibility but unused here.
        return FakeMutableFileNode(None, None, None, None).create(contents)
# Fake uploader: read the uploadable's full contents, stash them in a fake
# CHK filenode, and return UploadResults whose .uri points at that node.
# NOTE(review): numbering jumps show original lines 48-49, 53-54, 58 and
# 60-62 are missing from this listing (including the enclosing class
# statement and the 'def _got_data(data):' line), so this method body is
# incomplete as shown.
50 def upload(self, uploadable):
51 d = uploadable.get_size()
52 d.addCallback(lambda size: uploadable.read(size))
55 n = create_chk_filenode(data)
56 results = upload.UploadResults()
57 results.uri = n.get_uri()
59 d.addCallback(_got_data)
# Fake history service: one pre-made status object of each kind, with
# list_all_*() accessors that simply return the singleton lists, so the
# /status pages have exactly one entry of each type to render.
# NOTE(review): the enclosing class statement (around original line 62) and
# the body of list_all_helper_statuses (original line 80+) are not visible
# in this listing.
63 _all_upload_status = [upload.UploadStatus()]
64 _all_download_status = [download.DownloadStatus()]
65 _all_mapupdate_statuses = [servermap.UpdateStatus()]
66 _all_publish_statuses = [publish.PublishStatus()]
67 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
69 def list_all_upload_statuses(self):
70 return self._all_upload_status
71 def list_all_download_statuses(self):
72 return self._all_download_status
73 def list_all_mapupdate_statuses(self):
74 return self._all_mapupdate_statuses
75 def list_all_publish_statuses(self):
76 return self._all_publish_statuses
77 def list_all_retrieve_statuses(self):
78 return self._all_retrieve_statuses
79 def list_all_helper_statuses(self):
# Fake client node: a MultiService wired with the FakeUploader and
# FakeNodeMaker above, plus stubbed identity/stats/introducer attributes,
# so the webish frontend can be exercised without a real Tahoe node.
# NOTE(review): numbering jumps show several original lines missing from
# this listing (e.g. 83, 88-89, 95-98, 100, 103, 105-106, 114-115, 118,
# 121, 125, 128), so some method bodies below are incomplete.
82 class FakeClient(service.MultiService):
84 service.MultiService.__init__(self)
85 self.uploader = FakeUploader()
86 self.nodemaker = FakeNodeMaker(None, None, None,
87 self.uploader, None, None,
90 nodeid = "fake_nodeid"
91 nickname = "fake_nickname"
92 basedir = "fake_basedir"
93 def get_versions(self):
94 return {'allmydata': "fake",
# the welcome page renders these directly
99 introducer_furl = "None"
101 convergence = "some random string"
102 stats_provider = FakeStatsProvider()
104 def connected_to_introducer(self):
107 storage_broker = StorageFarmBroker(None, permute_peers=True)
108 def get_storage_broker(self):
109 return self.storage_broker
110 _secret_holder = None
111 def get_encoding_parameters(self):
112 return {"k": 3, "n": 10}
113 def get_history(self):
116 def create_node_from_uri(self, writecap, readcap=None):
117 return self.nodemaker.create_from_cap(writecap, readcap)
119 def create_empty_dirnode(self):
120 return self.nodemaker.create_new_mutable_directory()
122 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
123 def create_mutable_file(self, contents=""):
124 return self.nodemaker.create_mutable_file(contents)
# delegate uploads to the fake uploader wired up in __init__
126 def upload(self, uploadable):
127 return self.uploader.upload(uploadable)
# WebMixin: shared fixture for the web tests. Starts a FakeClient-backed
# webish server on an ephemeral port and builds a small directory tree:
#   public/foo/{bar.txt, empty, sub/baz.txt, blockingfile, n-umlaut.txt}
#   public/reedownlee/nor  (a read-only directory)
# NOTE(review): numbering jumps show many original lines missing from this
# listing, including the 'def setUp(self):' line itself (around 130), the
# callback def around 141, and lines 146-148, 155, 159, 165, 168, 173, 177,
# 181-182, 185-193, 196, 198, 202-203 -- so this body is incomplete.
129 class WebMixin(object):
131 self.s = FakeClient()
132 self.s.startService()
133 self.staticdir = self.mktemp()
134 self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
135 s.setServiceParent(self.s)
# "0" above asks for an ephemeral port; recover the real one from the listener
136 self.webish_port = port = s.listener._port.getHost().port
137 self.webish_url = "http://localhost:%d" % port
# create six empty dirnodes up front; res[0]..res[5] below index into them
139 l = [ self.s.create_empty_dirnode() for x in range(6) ]
140 d = defer.DeferredList(l)
142 self.public_root = res[0][1]
143 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
144 self.public_url = "/uri/" + self.public_root.get_uri()
145 self.private_root = res[1][1]
149 self._foo_uri = foo.get_uri()
150 self._foo_readonly_uri = foo.get_readonly_uri()
151 self._foo_verifycap = foo.get_verify_cap().to_string()
152 # NOTE: we ignore the deferred on all set_uri() calls, because we
153 # know the fake nodes do these synchronously
154 self.public_root.set_uri(u"foo", foo.get_uri())
156 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
157 foo.set_uri(u"bar.txt", self._bar_txt_uri)
158 self._bar_txt_verifycap = n.get_verify_cap().to_string()
160 foo.set_uri(u"empty", res[3][1].get_uri())
161 sub_uri = res[4][1].get_uri()
162 self._sub_uri = sub_uri
163 foo.set_uri(u"sub", sub_uri)
164 sub = self.s.create_node_from_uri(sub_uri)
# 'blockingfile' exists so tests can try to treat a file as a directory
166 _ign, n, blocking_uri = self.makefile(1)
167 foo.set_uri(u"blockingfile", blocking_uri)
169 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
170 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
171 # still think of it as an umlaut
172 foo.set_uri(unicode_filename, self._bar_txt_uri)
174 _ign, n, baz_file = self.makefile(2)
175 self._baz_file_uri = baz_file
176 sub.set_uri(u"baz.txt", baz_file)
178 _ign, n, self._bad_file_uri = self.makefile(3)
179 # this uri should not be downloadable
180 del FakeCHKFileNode.all_contents[self._bad_file_uri]
183 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
184 rodir.set_uri(u"nor", baz_file)
189 # public/foo/blockingfile
192 # public/foo/sub/baz.txt
194 # public/reedownlee/nor
195 self.NEWFILE_CONTENTS = "newfile contents\n"
197 return foo.get_metadata_for(u"bar.txt")
199 def _got_metadata(metadata):
200 self._bar_txt_metadata = metadata
201 d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a numbered fake CHK file; return (contents, node, uri)."""
    body = "contents of file %s\n" % (number,)
    node = create_chk_filenode(body)
    return body, node, node.get_uri()
# tearDown remnant: stop the FakeClient service tree. The 'def tearDown'
# line (original 209) is missing from this listing.
210 return self.s.stopService()
# Assert that a downloaded body is exactly bar.txt's known contents.
212 def failUnlessIsBarDotTxt(self, res):
213 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
def failUnlessIsBarJSON(self, res):
    """Assert that 'res' is the t=json rendering of the bar.txt filenode."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], u"filenode")
    meta = parsed[1]
    self.failUnless(isinstance(meta, dict))
    self.failIf(meta["mutable"])
    # immutable files must not expose a write-cap
    self.failIf("rw_uri" in meta)
    self.failUnlessEqual(meta["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(meta["verify_uri"], self._bar_txt_verifycap)
    self.failUnlessEqual(meta["size"], len(self.BAR_CONTENTS))
# Assert that 'res' is the t=json rendering of the 'foo' dirnode: a mutable
# dirnode with rw/ro/verify caps and the expected five children, each with
# metadata (ctime/mtime) and caps matching the fixtures built in setUp.
# NOTE(review): original lines 225, 236, 242 and 256-257 are missing from
# this listing (e.g. the 'for name, value' half of the dict-comprehension
# around 242, and the tail of the final assertion), so the body is
# incomplete as shown.
226 def failUnlessIsFooJSON(self, res):
227 data = simplejson.loads(res)
228 self.failUnless(isinstance(data, list))
229 self.failUnlessEqual(data[0], "dirnode", res)
230 self.failUnless(isinstance(data[1], dict))
231 self.failUnless(data[1]["mutable"])
232 self.failUnless("rw_uri" in data[1]) # mutable
233 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
234 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
235 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
237 kidnames = sorted([unicode(n) for n in data[1]["children"]])
238 self.failUnlessEqual(kidnames,
239 [u"bar.txt", u"blockingfile", u"empty",
240 u"n\u00fc.txt", u"sub"])
241 kids = dict( [(unicode(name),value)
243 in data[1]["children"].iteritems()] )
244 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
245 self.failUnless("metadata" in kids[u"sub"][1])
246 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
247 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
248 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
249 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
250 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
251 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
252 self._bar_txt_verifycap)
253 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
254 self._bar_txt_metadata["ctime"])
255 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
# Issue an HTTP GET against the test webish server. Returns the factory's
# deferred; with return_response=True it fires with (data, status, headers).
# NOTE(review): original lines 259, 267-268 and 270 are missing (including
# the '**kwargs):' tail of the signature and the 'def _got_data' / 'if
# return_response:' lines), so this body is incomplete as shown.
258 def GET(self, urlpath, followRedirect=False, return_response=False,
260 # if return_response=True, this fires with (data, statuscode,
261 # respheaders) instead of just data.
262 assert not isinstance(urlpath, unicode)
263 url = self.webish_url + urlpath
264 factory = HTTPClientGETFactory(url, method="GET",
265 followRedirect=followRedirect, **kwargs)
266 reactor.connectTCP("localhost", self.webish_port, factory)
269 return (data, factory.status, factory.response_headers)
271 d.addCallback(_got_data)
272 return factory.deferred
# Issue an HTTP HEAD; uses a custom factory because twisted.web.client
# does not expose response headers. Mirrors GET's (data, status, headers)
# shape when return_response is requested.
# NOTE(review): unlike GET/PUT/DELETE, the factory here is given the bare
# 'urlpath' rather than self.webish_url + urlpath -- possibly a bug, but
# original lines 279-280 and 282 are missing from this listing, so the
# full picture is not visible; confirm against the complete source.
274 def HEAD(self, urlpath, return_response=False, **kwargs):
275 # this requires some surgery, because twisted.web.client doesn't want
276 # to give us back the response headers.
277 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
278 reactor.connectTCP("localhost", self.webish_port, factory)
281 return (data, factory.status, factory.response_headers)
283 d.addCallback(_got_data)
284 return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """HTTP PUT of 'data' to the webapi path; returns getPage's deferred."""
    return client.getPage(self.webish_url + urlpath, method="PUT",
                          postdata=data, **kwargs)
def DELETE(self, urlpath):
    """HTTP DELETE of the webapi path; returns getPage's deferred."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
# Issue a multipart/form-data POST built by hand: each keyword arg becomes
# a form field; a (filename, value) tuple becomes a file-upload field.
# Unicode values are UTF-8 encoded before being placed in the body.
# NOTE(review): original lines 297-299, 301-303, 309, 311, 314-315, 317-319
# and 322 are missing (form initialization, boundary separators, the else
# branch, and part of the headers dict), so this body is incomplete.
294 def POST(self, urlpath, followRedirect=False, **fields):
295 url = self.webish_url + urlpath
# arbitrary multipart boundary string
296 sepbase = "boogabooga"
300 form.append('Content-Disposition: form-data; name="_charset"')
304 for name, value in fields.iteritems():
305 if isinstance(value, tuple):
306 filename, value = value
307 form.append('Content-Disposition: form-data; name="%s"; '
308 'filename="%s"' % (name, filename.encode("utf-8")))
310 form.append('Content-Disposition: form-data; name="%s"' % name)
312 if isinstance(value, unicode):
313 value = value.encode("utf-8")
316 assert isinstance(value, str)
320 body = "\r\n".join(form) + "\r\n"
321 headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
323 return client.getPage(url, method="POST", postdata=body,
324 headers=headers, followRedirect=followRedirect)
# Errback-style checker: if 'res' is a Failure, trap the expected exception
# and optionally check substrings of the failure text and of the HTTP
# response body; otherwise (the missing else branch) fail the test because
# no exception was raised.
# NOTE(review): original lines 330 and 338 are missing -- by the structure
# these look like the 'if substring:' and 'else:' lines; confirm against
# the complete source.
326 def shouldFail(self, res, expected_failure, which,
327 substring=None, response_substring=None):
328 if isinstance(res, failure.Failure):
329 res.trap(expected_failure)
331 self.failUnless(substring in str(res),
332 "substring '%s' not in '%s'"
333 % (substring, str(res)))
334 if response_substring:
335 self.failUnless(response_substring in res.value.response,
336 "response substring '%s' not in '%s'"
337 % (response_substring, res.value.response))
339 self.fail("%s was supposed to raise %s, not get '%s'" %
340 (which, expected_failure, res))
# Call-style variant of shouldFail: runs 'callable(*args, **kwargs)' via
# maybeDeferred and asserts it fails with the expected exception, with the
# same substring / response_substring checks as shouldFail.
# NOTE(review): original lines 343, 348, 351, 358, 360 and 363-365 are
# missing -- including the signature line that (judging by the asserts
# below) declares 'response_substring', the '_done(res)' callback def, and
# the closing else/addBoth/return lines. Body is incomplete as shown.
342 def shouldFail2(self, expected_failure, which, substring,
344 callable, *args, **kwargs):
345 assert substring is None or isinstance(substring, str)
346 assert response_substring is None or isinstance(response_substring, str)
347 d = defer.maybeDeferred(callable, *args, **kwargs)
349 if isinstance(res, failure.Failure):
350 res.trap(expected_failure)
352 self.failUnless(substring in str(res),
353 "%s: substring '%s' not in '%s'"
354 % (which, substring, str(res)))
355 if response_substring:
356 self.failUnless(response_substring in res.value.response,
357 "%s: response substring '%s' not in '%s'"
359 response_substring, res.value.response))
361 self.fail("%s was supposed to raise %s, not get '%s'" %
362 (which, expected_failure, res))
# Errback checker for 404s: trap twisted.web error.Error and assert the
# HTTP status (a string in twisted) is "404"; otherwise fail the test.
# NOTE(review): original lines 370 and 372-373 are missing (likely the
# else branch and the tail of the fail() message).
366 def should404(self, res, which):
367 if isinstance(res, failure.Failure):
368 res.trap(error.Error)
369 self.failUnlessEqual(res.value.status, "404")
371 self.fail("%s was supposed to Error(404), not get '%s'" %
# The main web-frontend test case; WebMixin supplies the fixture tree and
# the GET/PUT/POST/DELETE helpers.
# NOTE(review): original lines 377-378, 380-381, 383, 387 and 389-390 are
# missing, so test_create's body and most of test_welcome's structure
# (its GET call and callback wiring) are not visible here.
375 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
376 def test_create(self):
379 def test_welcome(self):
382 self.failUnless('Welcome To TahoeLAFS' in res, res)
384 self.s.basedir = 'web/test_welcome'
385 fileutil.make_dirs("web/test_welcome")
386 fileutil.make_dirs("web/test_welcome/private")
388 d.addCallback(_check)
# Exercise the /provisioning tool: GET the page, then POST three sets of
# form fields (small grid, huge grid, defaults-only) and check the rendered
# capacity figures.
# NOTE(review): many original lines are missing from this listing (e.g.
# 393, 401, 404-407, 409, 411, 414, 424, 426-429, 432, 437, 440-441),
# including several callback 'def' lines and field entries, so the bodies
# below are incomplete as shown.
391 def test_provisioning(self):
392 d = self.GET("/provisioning/")
394 self.failUnless('Tahoe Provisioning Tool' in res)
395 fields = {'filled': True,
396 "num_users": int(50e3),
397 "files_per_user": 1000,
398 "space_per_user": int(1e9),
399 "sharing_ratio": 1.0,
400 "encoding_parameters": "3-of-10-5",
402 "ownership_mode": "A",
403 "download_rate": 100,
408 return self.POST("/provisioning/", **fields)
410 d.addCallback(_check)
412 self.failUnless('Tahoe Provisioning Tool' in res)
413 self.failUnless("Share space consumed: 167.01TB" in res)
415 fields = {'filled': True,
416 "num_users": int(50e6),
417 "files_per_user": 1000,
418 "space_per_user": int(5e9),
419 "sharing_ratio": 1.0,
420 "encoding_parameters": "25-of-100-50",
421 "num_servers": 30000,
422 "ownership_mode": "E",
423 "drive_failure_model": "U",
425 "download_rate": 1000,
430 return self.POST("/provisioning/", **fields)
431 d.addCallback(_check2)
433 self.failUnless("Share space consumed: huge!" in res)
434 fields = {'filled': True}
435 return self.POST("/provisioning/", **fields)
436 d.addCallback(_check3)
438 self.failUnless("Share space consumed:" in res)
439 d.addCallback(_check4)
# Exercise the /reliability tool; skipped when NumPy (required by the
# reliability module) is unavailable -- the import is attempted first.
# NOTE(review): original lines 443, 446, 448, 450, 453-456, 459-460, 462,
# 464 and 469-470 are missing (the try/except around the import, several
# form fields, and callback wiring), so these bodies are incomplete.
442 def test_reliability_tool(self):
444 from allmydata import reliability
445 _hush_pyflakes = reliability
447 raise unittest.SkipTest("reliability tool requires NumPy")
449 d = self.GET("/reliability/")
451 self.failUnless('Tahoe Reliability Tool' in res)
452 fields = {'drive_lifetime': "8Y",
457 "check_period": "1M",
458 "report_period": "3M",
461 return self.POST("/reliability/", **fields)
463 d.addCallback(_check)
465 self.failUnless('Tahoe Reliability Tool' in res)
466 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
467 self.failUnless(re.search(r, res), res)
468 d.addCallback(_check2)
# Walk the /status pages: the overview must list one entry per status kind
# (using the counters from the fake history's singleton status objects),
# the ?t=json form must parse, and each per-operation page must render.
# NOTE(review): original lines 479, 493, 495, 497, 501 and 516-518 are
# missing (some callback 'def' lines, an '_ign = active' style line, and
# the trailing 'return d'), so this body is incomplete as shown.
471 def test_status(self):
472 h = self.s.get_history()
473 dl_num = h.list_all_download_statuses()[0].get_counter()
474 ul_num = h.list_all_upload_statuses()[0].get_counter()
475 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
476 pub_num = h.list_all_publish_statuses()[0].get_counter()
477 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
478 d = self.GET("/status", followRedirect=True)
480 self.failUnless('Upload and Download Status' in res, res)
481 self.failUnless('"down-%d"' % dl_num in res, res)
482 self.failUnless('"up-%d"' % ul_num in res, res)
483 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
484 self.failUnless('"publish-%d"' % pub_num in res, res)
485 self.failUnless('"retrieve-%d"' % ret_num in res, res)
486 d.addCallback(_check)
487 d.addCallback(lambda res: self.GET("/status/?t=json"))
488 def _check_json(res):
489 data = simplejson.loads(res)
490 self.failUnless(isinstance(data, dict))
491 active = data["active"]
492 # TODO: test more. We need a way to fake an active operation
494 d.addCallback(_check_json)
496 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
498 self.failUnless("File Download Status" in res, res)
499 d.addCallback(_check_dl)
500 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
502 self.failUnless("File Upload Status" in res, res)
503 d.addCallback(_check_ul)
504 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
505 def _check_mapupdate(res):
506 self.failUnless("Mutable File Servermap Update Status" in res, res)
507 d.addCallback(_check_mapupdate)
508 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
509 def _check_publish(res):
510 self.failUnless("Mutable File Publish Status" in res, res)
511 d.addCallback(_check_publish)
512 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
513 def _check_retrieve(res):
514 self.failUnless("Mutable File Retrieve Status" in res, res)
515 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """render_time/render_rate must format durations and transfer rates
    identically for both the download- and upload-results renderers."""
    time_cases = [(None, ""),
                  (2.5, "2.50s"),
                  (0.25, "250ms"),
                  (0.0021, "2.1ms"),
                  (0.000123, "123us")]
    rate_cases = [(None, ""),
                  (2500000, "2.50MBps"),
                  (30100, "30.1kBps"),
                  (123, "123Bps")]
    renderers = [status.DownloadResultsRendererMixin(),
                 status.UploadResultsRendererMixin()]
    for rdr in renderers:
        for value, expected in time_cases:
            self.failUnlessEqual(rdr.render_time(None, value), expected)
        for value, expected in rate_cases:
            self.failUnlessEqual(rdr.render_rate(None, value), expected)
# Basic file retrieval plus HTTP Range handling: full GET, closed and
# open-ended byte ranges (expect 206 + content-range) for both GET and
# HEAD, a syntactically-bad range header (expect an error), and header
# checks on a plain HEAD.
# NOTE(review): numbering jumps show missing original lines throughout
# (e.g. 545-546, 557-559, 571-573, 584-586, 597-599, 603, 606-608,
# 616-618), including most 'd.addCallback(_got); return d' tails, so the
# bodies below are incomplete as shown. The 'def _got((res, ...))' tuple
# parameters are Python-2-only syntax.
542 def test_GET_FILEURL(self):
543 d = self.GET(self.public_url + "/foo/bar.txt")
544 d.addCallback(self.failUnlessIsBarDotTxt)
547 def test_GET_FILEURL_range(self):
548 headers = {"range": "bytes=1-10"}
549 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
550 return_response=True)
551 def _got((res, status, headers)):
552 self.failUnlessEqual(int(status), 206)
553 self.failUnless(headers.has_key("content-range"))
554 self.failUnlessEqual(headers["content-range"][0],
555 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
556 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
560 def test_GET_FILEURL_partial_range(self):
561 headers = {"range": "bytes=5-"}
562 length = len(self.BAR_CONTENTS)
563 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
564 return_response=True)
565 def _got((res, status, headers)):
566 self.failUnlessEqual(int(status), 206)
567 self.failUnless(headers.has_key("content-range"))
568 self.failUnlessEqual(headers["content-range"][0],
569 "bytes 5-%d/%d" % (length-1, length))
570 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
574 def test_HEAD_FILEURL_range(self):
575 headers = {"range": "bytes=1-10"}
576 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
577 return_response=True)
578 def _got((res, status, headers)):
# HEAD must return no body, but the same 206 + content-range as GET
579 self.failUnlessEqual(res, "")
580 self.failUnlessEqual(int(status), 206)
581 self.failUnless(headers.has_key("content-range"))
582 self.failUnlessEqual(headers["content-range"][0],
583 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
587 def test_HEAD_FILEURL_partial_range(self):
588 headers = {"range": "bytes=5-"}
589 length = len(self.BAR_CONTENTS)
590 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
591 return_response=True)
592 def _got((res, status, headers)):
593 self.failUnlessEqual(int(status), 206)
594 self.failUnless(headers.has_key("content-range"))
595 self.failUnlessEqual(headers["content-range"][0],
596 "bytes 5-%d/%d" % (length-1, length))
600 def test_GET_FILEURL_range_bad(self):
601 headers = {"range": "BOGUS=fizbop-quarnak"}
602 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
604 "Syntactically invalid http range header",
605 self.GET, self.public_url + "/foo/bar.txt",
609 def test_HEAD_FILEURL(self):
610 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
611 def _got((res, status, headers)):
612 self.failUnlessEqual(res, "")
613 self.failUnlessEqual(headers["content-length"][0],
614 str(len(self.BAR_CONTENTS)))
615 self.failUnlessEqual(headers["content-type"], ["text/plain"])
# /file/$CAP and /named/$CAP URL forms: the trailing filename component
# (including @@name= and extra path segments) must be ignored for content,
# and ?save=true&filename=... (including a UTF-8-encoded name) must still
# deliver the file body.
# NOTE(review): original lines 638-639 (likely the final 'return d') are
# missing from this listing.
619 def test_GET_FILEURL_named(self):
620 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
621 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
622 d = self.GET(base + "/@@name=/blah.txt")
623 d.addCallback(self.failUnlessIsBarDotTxt)
624 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
625 d.addCallback(self.failUnlessIsBarDotTxt)
626 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
627 d.addCallback(self.failUnlessIsBarDotTxt)
628 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
629 d.addCallback(self.failUnlessIsBarDotTxt)
630 save_url = base + "?save=true&filename=blah.txt"
631 d.addCallback(lambda res: self.GET(save_url))
632 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
633 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
634 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
635 u_url = base + "?save=true&filename=" + u_fn_e
636 d.addCallback(lambda res: self.GET(u_url))
637 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
# Negative tests for the /file and /uri namespaces: wrong HTTP methods on
# /file, directory caps where file caps are required, verify-caps the node
# cannot download, bogus children of file caps, and plain 404s. Each uses
# shouldFail2/should404 from WebMixin.
# NOTE(review): numbering jumps show missing lines throughout (e.g.
# 643, 646-647, 651-652, 654-655, 658, 660-662, 670-672, 679, 681-683,
# 686, 688-689, 695-697, 703-705, 709, 711-712, 716-717) -- mostly status
# strings, GET/return-d tails, and blank separators. Bodies incomplete.
640 def test_PUT_FILEURL_named_bad(self):
641 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
642 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
644 "/file can only be used with GET or HEAD",
645 self.PUT, base + "/@@name=/blah.txt", "")
648 def test_GET_DIRURL_named_bad(self):
649 base = "/file/%s" % urllib.quote(self._foo_uri)
650 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
653 self.GET, base + "/@@name=/blah.txt")
656 def test_GET_slash_file_bad(self):
657 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
659 "/file must be followed by a file-cap and a name",
663 def test_GET_unhandled_URI_named(self):
664 contents, n, newuri = self.makefile(12)
665 verifier_cap = n.get_verify_cap().to_string()
666 base = "/file/%s" % urllib.quote(verifier_cap)
667 # client.create_node_from_uri() can't handle verify-caps
668 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
669 "400 Bad Request", "is not a file-cap",
673 def test_GET_unhandled_URI(self):
674 contents, n, newuri = self.makefile(12)
675 verifier_cap = n.get_verify_cap().to_string()
676 base = "/uri/%s" % urllib.quote(verifier_cap)
677 # client.create_node_from_uri() can't handle verify-caps
678 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
680 "GET unknown URI type: can only do t=info",
684 def test_GET_FILE_URI(self):
685 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
687 d.addCallback(self.failUnlessIsBarDotTxt)
690 def test_GET_FILE_URI_badchild(self):
691 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
692 errmsg = "Files have no children, certainly not named 'boguschild'"
693 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
694 "400 Bad Request", errmsg,
698 def test_PUT_FILE_URI_badchild(self):
699 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
700 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
701 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
702 "400 Bad Request", errmsg,
706 def test_GET_FILEURL_save(self):
707 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
708 # TODO: look at the headers, expect a Content-Disposition: attachment
710 d.addCallback(self.failUnlessIsBarDotTxt)
713 def test_GET_FILEURL_missing(self):
714 d = self.GET(self.public_url + "/foo/missing")
715 d.addBoth(self.should404, "test_GET_FILEURL_missing")
# PUT-based file creation and overwrite semantics: replace=only-files,
# plain and mutable=true/false PUTs, rejected Content-Range, and the SDMF
# size limit for mutable files.
# NOTE(review): numbering jumps show missing lines throughout (e.g. 726,
# 729, 733, 735-737, 746-747, 757-758, 767, 770-771, 777, 781, 786,
# 788-789, 794, 797-798) -- mostly filecap arguments, callback tails and
# 'return d' lines. Bodies incomplete as shown.
718 def test_PUT_overwrite_only_files(self):
719 # create a directory, put a file in that directory.
720 contents, n, filecap = self.makefile(8)
721 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
722 d.addCallback(lambda res:
723 self.PUT(self.public_url + "/foo/dir/file1.txt",
724 self.NEWFILE_CONTENTS))
725 # try to overwrite the file with replace=only-files
727 d.addCallback(lambda res:
728 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
730 d.addCallback(lambda res:
731 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
732 "There was already a child by that name, and you asked me "
734 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
738 def test_PUT_NEWFILEURL(self):
739 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
740 # TODO: we lose the response code, so we can't check this
741 #self.failUnlessEqual(responsecode, 201)
742 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
743 d.addCallback(lambda res:
744 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
745 self.NEWFILE_CONTENTS))
748 def test_PUT_NEWFILEURL_not_mutable(self):
749 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
750 self.NEWFILE_CONTENTS)
751 # TODO: we lose the response code, so we can't check this
752 #self.failUnlessEqual(responsecode, 201)
753 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
754 d.addCallback(lambda res:
755 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
756 self.NEWFILE_CONTENTS))
759 def test_PUT_NEWFILEURL_range_bad(self):
760 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
761 target = self.public_url + "/foo/new.txt"
762 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
763 "501 Not Implemented",
764 "Content-Range in PUT not yet supported",
765 # (and certainly not for immutable files)
766 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
# a failed partial PUT must not leave a child behind
768 d.addCallback(lambda res:
769 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
772 def test_PUT_NEWFILEURL_mutable(self):
773 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
774 self.NEWFILE_CONTENTS)
775 # TODO: we lose the response code, so we can't check this
776 #self.failUnlessEqual(responsecode, 201)
778 u = uri.from_string_mutable_filenode(res)
779 self.failUnless(u.is_mutable())
780 self.failIf(u.is_readonly())
782 d.addCallback(_check_uri)
783 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
784 d.addCallback(lambda res:
785 self.failUnlessMutableChildContentsAre(self._foo_node,
787 self.NEWFILE_CONTENTS))
790 def test_PUT_NEWFILEURL_mutable_toobig(self):
791 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
792 "413 Request Entity Too Large",
793 "SDMF is limited to one segment, and 10001 > 10000",
795 self.public_url + "/foo/new.txt?mutable=true",
796 "b" * (self.s.MUTABLE_SIZELIMIT+1))
# Overwrite/replace semantics and DELETE: plain replace succeeds, bad t=
# values and replace=false are rejected, intermediate directories are
# created by PUT, a file in the path blocks mkdir, and DELETE removes
# children (404 when missing).
# NOTE(review): numbering jumps show missing lines throughout (e.g.
# 807-808, 813-815, 820, 822-824, 827, 834-835, 840, 842-843, 848-849,
# 853-854, 858-859) -- mostly error-message tails, '_check' defs and
# 'return d' lines. Bodies incomplete as shown.
799 def test_PUT_NEWFILEURL_replace(self):
800 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
801 # TODO: we lose the response code, so we can't check this
802 #self.failUnlessEqual(responsecode, 200)
803 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
804 d.addCallback(lambda res:
805 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
806 self.NEWFILE_CONTENTS))
809 def test_PUT_NEWFILEURL_bad_t(self):
810 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
811 "PUT to a file: bad t=bogus",
812 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
816 def test_PUT_NEWFILEURL_no_replace(self):
817 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
818 self.NEWFILE_CONTENTS)
819 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
821 "There was already a child by that name, and you asked me "
825 def test_PUT_NEWFILEURL_mkdirs(self):
826 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
828 d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
829 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
830 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
831 d.addCallback(lambda res:
832 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
833 self.NEWFILE_CONTENTS))
836 def test_PUT_NEWFILEURL_blocked(self):
837 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
838 self.NEWFILE_CONTENTS)
839 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
841 "Unable to create directory 'blockingfile': a file was in the way")
844 def test_DELETE_FILEURL(self):
845 d = self.DELETE(self.public_url + "/foo/bar.txt")
846 d.addCallback(lambda res:
847 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
850 def test_DELETE_FILEURL_missing(self):
851 d = self.DELETE(self.public_url + "/foo/missing")
852 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
855 def test_DELETE_FILEURL_missing2(self):
856 d = self.DELETE(self.public_url + "/missing/missing")
857 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
# t=json / t=uri / t=readonly-uri views of a file, plus 404 and bad-t
# handling for the same URL forms.
# NOTE(review): numbering jumps show missing lines (e.g. 864, 867-868,
# 872-873, 876, 881, 885-886, 889-890, 892-893, 897-898) -- callback defs
# and 'return d' tails. Bodies incomplete as shown.
860 def test_GET_FILEURL_json(self):
861 # twisted.web.http.parse_qs ignores any query args without an '=', so
862 # I can't do "GET /path?json", I have to do "GET /path/t=json"
863 # instead. This may make it tricky to emulate the S3 interface
865 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
866 d.addCallback(self.failUnlessIsBarJSON)
869 def test_GET_FILEURL_json_missing(self):
870 d = self.GET(self.public_url + "/foo/missing?json")
871 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
874 def test_GET_FILEURL_uri(self):
875 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
877 self.failUnlessEqual(res, self._bar_txt_uri)
878 d.addCallback(_check)
879 d.addCallback(lambda res:
880 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
882 # for now, for files, uris and readonly-uris are the same
883 self.failUnlessEqual(res, self._bar_txt_uri)
884 d.addCallback(_check2)
887 def test_GET_FILEURL_badtype(self):
888 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
891 self.public_url + "/foo/bar.txt?t=bogus")
894 def test_GET_FILEURL_uri_missing(self):
895 d = self.GET(self.public_url + "/foo/missing?t=uri")
896 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
# HTML directory listing: checks the welcome-page link, the FILE row for
# bar.txt (named URL, size), relative form-action URLs for the delete
# button, the DIR row for 'sub', the read-only rendering of 'reedownlee',
# the DIR-RO row, and the empty-directory mkdir form.
# NOTE(review): numbering jumps show missing lines (e.g. 902, 904, 906,
# 911, 913, 915, 928-930, 932, 939, 943, 947, 951, 955, 958, 963-965),
# including the ROOT definition, '_check' def lines, regex fragments and
# 'return d'. Bodies incomplete as shown.
899 def test_GET_DIRURL(self):
900 # the addSlash means we get a redirect here
901 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
903 d = self.GET(self.public_url + "/foo", followRedirect=True)
905 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
907 # the FILE reference points to a URI, but it should end in bar.txt
908 bar_url = ("%s/file/%s/@@named=/bar.txt" %
909 (ROOT, urllib.quote(self._bar_txt_uri)))
910 get_bar = "".join([r'<td>FILE</td>',
912 r'<a href="%s">bar.txt</a>' % bar_url,
914 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
916 self.failUnless(re.search(get_bar, res), res)
917 for line in res.split("\n"):
918 # find the line that contains the delete button for bar.txt
919 if ("form action" in line and
920 'value="delete"' in line and
921 'value="bar.txt"' in line):
922 # the form target should use a relative URL
923 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
924 self.failUnless(('action="%s"' % foo_url) in line, line)
925 # and the when_done= should too
926 #done_url = urllib.quote(???)
927 #self.failUnless(('name="when_done" value="%s"' % done_url)
931 self.fail("unable to find delete-bar.txt line", res)
933 # the DIR reference just points to a URI
934 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
935 get_sub = ((r'<td>DIR</td>')
936 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
937 self.failUnless(re.search(get_sub, res), res)
938 d.addCallback(_check)
940 # look at a directory which is readonly
941 d.addCallback(lambda res:
942 self.GET(self.public_url + "/reedownlee", followRedirect=True))
944 self.failUnless("(read-only)" in res, res)
945 self.failIf("Upload a file" in res, res)
946 d.addCallback(_check2)
948 # and at a directory that contains a readonly directory
949 d.addCallback(lambda res:
950 self.GET(self.public_url, followRedirect=True))
952 self.failUnless(re.search('<td>DIR-RO</td>'
953 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
954 d.addCallback(_check3)
956 # and an empty directory
957 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
959 self.failUnless("directory is empty" in res, res)
960 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
961 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
962 d.addCallback(_check4)
966 def test_GET_DIRURL_badtype(self):
967 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
971 self.public_url + "/foo?t=bogus")
974 def test_GET_DIRURL_json(self):
975 d = self.GET(self.public_url + "/foo?t=json")
976 d.addCallback(self.failUnlessIsFooJSON)
980 def test_POST_DIRURL_manifest_no_ophandle(self):
981 d = self.shouldFail2(error.Error,
982 "test_POST_DIRURL_manifest_no_ophandle",
984 "slow operation requires ophandle=",
985 self.POST, self.public_url, t="start-manifest")
988 def test_POST_DIRURL_manifest(self):
989 d = defer.succeed(None)
990 def getman(ignored, output):
991 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
993 d.addCallback(self.wait_for_operation, "125")
994 d.addCallback(self.get_operation_results, "125", output)
996 d.addCallback(getman, None)
997 def _got_html(manifest):
998 self.failUnless("Manifest of SI=" in manifest)
999 self.failUnless("<td>sub</td>" in manifest)
1000 self.failUnless(self._sub_uri in manifest)
1001 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1002 d.addCallback(_got_html)
1004 # both t=status and unadorned GET should be identical
1005 d.addCallback(lambda res: self.GET("/operations/125"))
1006 d.addCallback(_got_html)
1008 d.addCallback(getman, "html")
1009 d.addCallback(_got_html)
1010 d.addCallback(getman, "text")
1011 def _got_text(manifest):
1012 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1013 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1014 d.addCallback(_got_text)
1015 d.addCallback(getman, "JSON")
1017 data = res["manifest"]
1019 for (path_list, cap) in data:
1020 got[tuple(path_list)] = cap
1021 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1022 self.failUnless((u"sub",u"baz.txt") in got)
1023 self.failUnless("finished" in res)
1024 self.failUnless("origin" in res)
1025 self.failUnless("storage-index" in res)
1026 self.failUnless("verifycaps" in res)
1027 self.failUnless("stats" in res)
1028 d.addCallback(_got_json)
1031 def test_POST_DIRURL_deepsize_no_ophandle(self):
1032 d = self.shouldFail2(error.Error,
1033 "test_POST_DIRURL_deepsize_no_ophandle",
1035 "slow operation requires ophandle=",
1036 self.POST, self.public_url, t="start-deep-size")
1039 def test_POST_DIRURL_deepsize(self):
1040 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1041 followRedirect=True)
1042 d.addCallback(self.wait_for_operation, "126")
1043 d.addCallback(self.get_operation_results, "126", "json")
1044 def _got_json(data):
1045 self.failUnlessEqual(data["finished"], True)
1047 self.failUnless(size > 1000)
1048 d.addCallback(_got_json)
1049 d.addCallback(self.get_operation_results, "126", "text")
1051 mo = re.search(r'^size: (\d+)$', res, re.M)
1052 self.failUnless(mo, res)
1053 size = int(mo.group(1))
1054 # with directories, the size varies.
1055 self.failUnless(size > 1000)
1056 d.addCallback(_got_text)
1059 def test_POST_DIRURL_deepstats_no_ophandle(self):
1060 d = self.shouldFail2(error.Error,
1061 "test_POST_DIRURL_deepstats_no_ophandle",
1063 "slow operation requires ophandle=",
1064 self.POST, self.public_url, t="start-deep-stats")
1067 def test_POST_DIRURL_deepstats(self):
1068 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1069 followRedirect=True)
1070 d.addCallback(self.wait_for_operation, "127")
1071 d.addCallback(self.get_operation_results, "127", "json")
1072 def _got_json(stats):
1073 expected = {"count-immutable-files": 3,
1074 "count-mutable-files": 0,
1075 "count-literal-files": 0,
1077 "count-directories": 3,
1078 "size-immutable-files": 57,
1079 "size-literal-files": 0,
1080 #"size-directories": 1912, # varies
1081 #"largest-directory": 1590,
1082 "largest-directory-children": 5,
1083 "largest-immutable-file": 19,
1085 for k,v in expected.iteritems():
1086 self.failUnlessEqual(stats[k], v,
1087 "stats[%s] was %s, not %s" %
1089 self.failUnlessEqual(stats["size-files-histogram"],
1091 d.addCallback(_got_json)
1094 def test_POST_DIRURL_stream_manifest(self):
1095 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1097 self.failUnless(res.endswith("\n"))
1098 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1099 self.failUnlessEqual(len(units), 7)
1100 self.failUnlessEqual(units[-1]["type"], "stats")
1102 self.failUnlessEqual(first["path"], [])
1103 self.failUnlessEqual(first["cap"], self._foo_uri)
1104 self.failUnlessEqual(first["type"], "directory")
1105 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1106 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1107 self.failIfEqual(baz["storage-index"], None)
1108 self.failIfEqual(baz["verifycap"], None)
1109 self.failIfEqual(baz["repaircap"], None)
1111 d.addCallback(_check)
    def test_GET_DIRURL_uri(self):
        # GET with t=uri should return the directory's URI verbatim.
        d = self.GET(self.public_url + "/foo?t=uri")
        # NOTE(review): the `def _check(res):` header appears to be elided
        # here -- confirm against the upstream source.
            self.failUnlessEqual(res, self._foo_uri)
        d.addCallback(_check)
    def test_GET_DIRURL_readonly_uri(self):
        # GET with t=readonly-uri should return the read-only cap.
        d = self.GET(self.public_url + "/foo?t=readonly-uri")
        # NOTE(review): the `def _check(res):` header appears to be elided
        # here -- confirm against the upstream source.
            self.failUnlessEqual(res, self._foo_readonly_uri)
        d.addCallback(_check)
1128 def test_PUT_NEWDIRURL(self):
1129 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1130 d.addCallback(lambda res:
1131 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1132 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1133 d.addCallback(self.failUnlessNodeKeysAre, [])
1136 def test_PUT_NEWDIRURL_exists(self):
1137 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1138 d.addCallback(lambda res:
1139 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1140 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1141 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1144 def test_PUT_NEWDIRURL_blocked(self):
1145 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1146 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1148 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1149 d.addCallback(lambda res:
1150 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1151 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1152 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1155 def test_PUT_NEWDIRURL_mkdir_p(self):
1156 d = defer.succeed(None)
1157 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1158 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1159 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1160 def mkdir_p(mkpnode):
1161 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1163 def made_subsub(ssuri):
1164 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1165 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1167 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1169 d.addCallback(made_subsub)
1171 d.addCallback(mkdir_p)
1174 def test_PUT_NEWDIRURL_mkdirs(self):
1175 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1176 d.addCallback(lambda res:
1177 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1178 d.addCallback(lambda res:
1179 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1180 d.addCallback(lambda res:
1181 self._foo_node.get_child_at_path(u"subdir/newdir"))
1182 d.addCallback(self.failUnlessNodeKeysAre, [])
1185 def test_DELETE_DIRURL(self):
1186 d = self.DELETE(self.public_url + "/foo")
1187 d.addCallback(lambda res:
1188 self.failIfNodeHasChild(self.public_root, u"foo"))
1191 def test_DELETE_DIRURL_missing(self):
1192 d = self.DELETE(self.public_url + "/foo/missing")
1193 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1194 d.addCallback(lambda res:
1195 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1198 def test_DELETE_DIRURL_missing2(self):
1199 d = self.DELETE(self.public_url + "/missing")
1200 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
1203 def dump_root(self):
1205 w = webish.DirnodeWalkerMixin()
1206 def visitor(childpath, childnode, metadata):
1208 d = w.walk(self.public_root, visitor)
    def failUnlessNodeKeysAre(self, node, expected_keys):
        # Assert that dirnode `node` has exactly `expected_keys` as
        # children. All keys must be unicode.
        for k in expected_keys:
            assert isinstance(k, unicode)
        # NOTE(review): the line that creates `d` (presumably
        # `d = node.list()`) is not visible here -- confirm upstream.
        def _check(children):
            self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
        d.addCallback(_check)
    def failUnlessNodeHasChild(self, node, name):
        # Assert that dirnode `node` has a child named `name` (unicode).
        assert isinstance(name, unicode)
        # NOTE(review): the listing call (presumably `d = node.list()`) is
        # not visible here -- confirm against upstream source.
        def _check(children):
            self.failUnless(name in children)
        d.addCallback(_check)
    def failIfNodeHasChild(self, node, name):
        # Assert that dirnode `node` does NOT have a child named `name`.
        assert isinstance(name, unicode)
        # NOTE(review): the listing call (presumably `d = node.list()`) is
        # not visible here -- confirm against upstream source.
        def _check(children):
            self.failIf(name in children)
        d.addCallback(_check)
1234 def failUnlessChildContentsAre(self, node, name, expected_contents):
1235 assert isinstance(name, unicode)
1236 d = node.get_child_at_path(name)
1237 d.addCallback(lambda node: node.download_to_data())
1238 def _check(contents):
1239 self.failUnlessEqual(contents, expected_contents)
1240 d.addCallback(_check)
1243 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1244 assert isinstance(name, unicode)
1245 d = node.get_child_at_path(name)
1246 d.addCallback(lambda node: node.download_best_version())
1247 def _check(contents):
1248 self.failUnlessEqual(contents, expected_contents)
1249 d.addCallback(_check)
    def failUnlessChildURIIs(self, node, name, expected_uri):
        # Assert that child `name` of `node` has URI `expected_uri`
        # (surrounding whitespace on the expected value is ignored).
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        # NOTE(review): the `def _check(child):` header appears elided here.
            self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        d.addCallback(_check)
    def failUnlessURIMatchesChild(self, got_uri, node, name):
        # Assert that `got_uri` (whitespace-stripped) matches the URI of
        # child `name` of dirnode `node`.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        # NOTE(review): the `def _check(child):` header appears elided here.
            self.failUnlessEqual(got_uri.strip(), child.get_uri())
        d.addCallback(_check)
1268 def failUnlessCHKURIHasContents(self, got_uri, contents):
1269 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
1271 def test_POST_upload(self):
1272 d = self.POST(self.public_url + "/foo", t="upload",
1273 file=("new.txt", self.NEWFILE_CONTENTS))
1275 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1276 d.addCallback(lambda res:
1277 self.failUnlessChildContentsAre(fn, u"new.txt",
1278 self.NEWFILE_CONTENTS))
1281 def test_POST_upload_unicode(self):
1282 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1283 d = self.POST(self.public_url + "/foo", t="upload",
1284 file=(filename, self.NEWFILE_CONTENTS))
1286 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1287 d.addCallback(lambda res:
1288 self.failUnlessChildContentsAre(fn, filename,
1289 self.NEWFILE_CONTENTS))
1290 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1291 d.addCallback(lambda res: self.GET(target_url))
1292 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1293 self.NEWFILE_CONTENTS,
1297 def test_POST_upload_unicode_named(self):
1298 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1299 d = self.POST(self.public_url + "/foo", t="upload",
1301 file=("overridden", self.NEWFILE_CONTENTS))
1303 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1304 d.addCallback(lambda res:
1305 self.failUnlessChildContentsAre(fn, filename,
1306 self.NEWFILE_CONTENTS))
1307 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1308 d.addCallback(lambda res: self.GET(target_url))
1309 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1310 self.NEWFILE_CONTENTS,
1314 def test_POST_upload_no_link(self):
1315 d = self.POST("/uri", t="upload",
1316 file=("new.txt", self.NEWFILE_CONTENTS))
1317 def _check_upload_results(page):
1318 # this should be a page which describes the results of the upload
1319 # that just finished.
1320 self.failUnless("Upload Results:" in page)
1321 self.failUnless("URI:" in page)
1322 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1323 mo = uri_re.search(page)
1324 self.failUnless(mo, page)
1325 new_uri = mo.group(1)
1327 d.addCallback(_check_upload_results)
1328 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1331 def test_POST_upload_no_link_whendone(self):
1332 d = self.POST("/uri", t="upload", when_done="/",
1333 file=("new.txt", self.NEWFILE_CONTENTS))
1334 d.addBoth(self.shouldRedirect, "/")
    def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
        # Invoke `callable(*args, **kwargs)` and require that it fails with
        # error.PageRedirect; pass the redirect's status code and target
        # location to `checker`. `which` labels the failure message.
        d = defer.maybeDeferred(callable, *args, **kwargs)
        # NOTE(review): the callback header (presumably `def _done(res):`)
        # appears elided here -- confirm against upstream source.
            if isinstance(res, failure.Failure):
                res.trap(error.PageRedirect)
                statuscode = res.value.status
                target = res.value.location
                return checker(statuscode, target)
            # reaching here means no redirect happened: fail loudly
            self.fail("%s: callable was supposed to redirect, not return '%s'"
1350 def test_POST_upload_no_link_whendone_results(self):
1351 def check(statuscode, target):
1352 self.failUnlessEqual(statuscode, str(http.FOUND))
1353 self.failUnless(target.startswith(self.webish_url), target)
1354 return client.getPage(target, method="GET")
1355 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1357 self.POST, "/uri", t="upload",
1358 when_done="/uri/%(uri)s",
1359 file=("new.txt", self.NEWFILE_CONTENTS))
1360 d.addCallback(lambda res:
1361 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
1364 def test_POST_upload_no_link_mutable(self):
1365 d = self.POST("/uri", t="upload", mutable="true",
1366 file=("new.txt", self.NEWFILE_CONTENTS))
1367 def _check(filecap):
1368 filecap = filecap.strip()
1369 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1370 self.filecap = filecap
1371 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1372 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
1373 n = self.s.create_node_from_uri(filecap)
1374 return n.download_best_version()
1375 d.addCallback(_check)
1377 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1378 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1379 d.addCallback(_check2)
1381 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1382 return self.GET("/file/%s" % urllib.quote(self.filecap))
1383 d.addCallback(_check3)
1385 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1386 d.addCallback(_check4)
1389 def test_POST_upload_no_link_mutable_toobig(self):
1390 d = self.shouldFail2(error.Error,
1391 "test_POST_upload_no_link_mutable_toobig",
1392 "413 Request Entity Too Large",
1393 "SDMF is limited to one segment, and 10001 > 10000",
1395 "/uri", t="upload", mutable="true",
1397 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1400 def test_POST_upload_mutable(self):
1401 # this creates a mutable file
1402 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1403 file=("new.txt", self.NEWFILE_CONTENTS))
1405 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1406 d.addCallback(lambda res:
1407 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1408 self.NEWFILE_CONTENTS))
1409 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1411 self.failUnless(IMutableFileNode.providedBy(newnode))
1412 self.failUnless(newnode.is_mutable())
1413 self.failIf(newnode.is_readonly())
1414 self._mutable_node = newnode
1415 self._mutable_uri = newnode.get_uri()
1418 # now upload it again and make sure that the URI doesn't change
1419 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1420 d.addCallback(lambda res:
1421 self.POST(self.public_url + "/foo", t="upload",
1423 file=("new.txt", NEWER_CONTENTS)))
1424 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1425 d.addCallback(lambda res:
1426 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1428 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1430 self.failUnless(IMutableFileNode.providedBy(newnode))
1431 self.failUnless(newnode.is_mutable())
1432 self.failIf(newnode.is_readonly())
1433 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1434 d.addCallback(_got2)
1436 # upload a second time, using PUT instead of POST
1437 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1438 d.addCallback(lambda res:
1439 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1440 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1441 d.addCallback(lambda res:
1442 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1445 # finally list the directory, since mutable files are displayed
1446 # slightly differently
1448 d.addCallback(lambda res:
1449 self.GET(self.public_url + "/foo/",
1450 followRedirect=True))
1451 def _check_page(res):
1452 # TODO: assert more about the contents
1453 self.failUnless("SSK" in res)
1455 d.addCallback(_check_page)
1457 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1459 self.failUnless(IMutableFileNode.providedBy(newnode))
1460 self.failUnless(newnode.is_mutable())
1461 self.failIf(newnode.is_readonly())
1462 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1463 d.addCallback(_got3)
1465 # look at the JSON form of the enclosing directory
1466 d.addCallback(lambda res:
1467 self.GET(self.public_url + "/foo/?t=json",
1468 followRedirect=True))
1469 def _check_page_json(res):
1470 parsed = simplejson.loads(res)
1471 self.failUnlessEqual(parsed[0], "dirnode")
1472 children = dict( [(unicode(name),value)
1474 in parsed[1]["children"].iteritems()] )
1475 self.failUnless("new.txt" in children)
1476 new_json = children["new.txt"]
1477 self.failUnlessEqual(new_json[0], "filenode")
1478 self.failUnless(new_json[1]["mutable"])
1479 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1480 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1481 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1482 d.addCallback(_check_page_json)
1484 # and the JSON form of the file
1485 d.addCallback(lambda res:
1486 self.GET(self.public_url + "/foo/new.txt?t=json"))
1487 def _check_file_json(res):
1488 parsed = simplejson.loads(res)
1489 self.failUnlessEqual(parsed[0], "filenode")
1490 self.failUnless(parsed[1]["mutable"])
1491 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1492 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1493 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1494 d.addCallback(_check_file_json)
1496 # and look at t=uri and t=readonly-uri
1497 d.addCallback(lambda res:
1498 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1499 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1500 d.addCallback(lambda res:
1501 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1502 def _check_ro_uri(res):
1503 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1504 self.failUnlessEqual(res, ro_uri)
1505 d.addCallback(_check_ro_uri)
1507 # make sure we can get to it from /uri/URI
1508 d.addCallback(lambda res:
1509 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1510 d.addCallback(lambda res:
1511 self.failUnlessEqual(res, NEW2_CONTENTS))
1513 # and that HEAD computes the size correctly
1514 d.addCallback(lambda res:
1515 self.HEAD(self.public_url + "/foo/new.txt",
1516 return_response=True))
1517 def _got_headers((res, status, headers)):
1518 self.failUnlessEqual(res, "")
1519 self.failUnlessEqual(headers["content-length"][0],
1520 str(len(NEW2_CONTENTS)))
1521 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1522 d.addCallback(_got_headers)
1524 # make sure that size errors are displayed correctly for overwrite
1525 d.addCallback(lambda res:
1526 self.shouldFail2(error.Error,
1527 "test_POST_upload_mutable-toobig",
1528 "413 Request Entity Too Large",
1529 "SDMF is limited to one segment, and 10001 > 10000",
1531 self.public_url + "/foo", t="upload",
1534 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1537 d.addErrback(self.dump_error)
1540 def test_POST_upload_mutable_toobig(self):
1541 d = self.shouldFail2(error.Error,
1542 "test_POST_upload_mutable_toobig",
1543 "413 Request Entity Too Large",
1544 "SDMF is limited to one segment, and 10001 > 10000",
1546 self.public_url + "/foo",
1547 t="upload", mutable="true",
1549 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def dump_error(self, f):
        # Errback helper: print the HTTP response body hidden inside a
        # web error Failure, then (presumably) pass the Failure along.
        #
        # if the web server returns an error code (like 400 Bad Request),
        # web.client.getPage puts the HTTP response body into the .response
        # attribute of the exception object that it gives back. It does not
        # appear in the Failure's repr(), so the ERROR that trial displays
        # will be rather terse and unhelpful. addErrback this method to the
        # end of your chain to get more information out of these errors.
        if f.check(error.Error):
            print "web.error.Error:"
            print f.value.response
1565 def test_POST_upload_replace(self):
1566 d = self.POST(self.public_url + "/foo", t="upload",
1567 file=("bar.txt", self.NEWFILE_CONTENTS))
1569 d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
1570 d.addCallback(lambda res:
1571 self.failUnlessChildContentsAre(fn, u"bar.txt",
1572 self.NEWFILE_CONTENTS))
1575 def test_POST_upload_no_replace_ok(self):
1576 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1577 file=("new.txt", self.NEWFILE_CONTENTS))
1578 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1579 d.addCallback(lambda res: self.failUnlessEqual(res,
1580 self.NEWFILE_CONTENTS))
1583 def test_POST_upload_no_replace_queryarg(self):
1584 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1585 file=("bar.txt", self.NEWFILE_CONTENTS))
1586 d.addBoth(self.shouldFail, error.Error,
1587 "POST_upload_no_replace_queryarg",
1589 "There was already a child by that name, and you asked me "
1590 "to not replace it")
1591 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1592 d.addCallback(self.failUnlessIsBarDotTxt)
1595 def test_POST_upload_no_replace_field(self):
1596 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1597 file=("bar.txt", self.NEWFILE_CONTENTS))
1598 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1600 "There was already a child by that name, and you asked me "
1601 "to not replace it")
1602 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1603 d.addCallback(self.failUnlessIsBarDotTxt)
1606 def test_POST_upload_whendone(self):
1607 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1608 file=("new.txt", self.NEWFILE_CONTENTS))
1609 d.addBoth(self.shouldRedirect, "/THERE")
1611 d.addCallback(lambda res:
1612 self.failUnlessChildContentsAre(fn, u"new.txt",
1613 self.NEWFILE_CONTENTS))
1616 def test_POST_upload_named(self):
1618 d = self.POST(self.public_url + "/foo", t="upload",
1619 name="new.txt", file=self.NEWFILE_CONTENTS)
1620 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1621 d.addCallback(lambda res:
1622 self.failUnlessChildContentsAre(fn, u"new.txt",
1623 self.NEWFILE_CONTENTS))
1626 def test_POST_upload_named_badfilename(self):
1627 d = self.POST(self.public_url + "/foo", t="upload",
1628 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1629 d.addBoth(self.shouldFail, error.Error,
1630 "test_POST_upload_named_badfilename",
1632 "name= may not contain a slash",
1634 # make sure that nothing was added
1635 d.addCallback(lambda res:
1636 self.failUnlessNodeKeysAre(self._foo_node,
1637 [u"bar.txt", u"blockingfile",
1638 u"empty", u"n\u00fc.txt",
1642 def test_POST_FILEURL_check(self):
1643 bar_url = self.public_url + "/foo/bar.txt"
1644 d = self.POST(bar_url, t="check")
1646 self.failUnless("Healthy :" in res)
1647 d.addCallback(_check)
1648 redir_url = "http://allmydata.org/TARGET"
1649 def _check2(statuscode, target):
1650 self.failUnlessEqual(statuscode, str(http.FOUND))
1651 self.failUnlessEqual(target, redir_url)
1652 d.addCallback(lambda res:
1653 self.shouldRedirect2("test_POST_FILEURL_check",
1657 when_done=redir_url))
1658 d.addCallback(lambda res:
1659 self.POST(bar_url, t="check", return_to=redir_url))
1661 self.failUnless("Healthy :" in res)
1662 self.failUnless("Return to file" in res)
1663 self.failUnless(redir_url in res)
1664 d.addCallback(_check3)
1666 d.addCallback(lambda res:
1667 self.POST(bar_url, t="check", output="JSON"))
1668 def _check_json(res):
1669 data = simplejson.loads(res)
1670 self.failUnless("storage-index" in data)
1671 self.failUnless(data["results"]["healthy"])
1672 d.addCallback(_check_json)
1676 def test_POST_FILEURL_check_and_repair(self):
1677 bar_url = self.public_url + "/foo/bar.txt"
1678 d = self.POST(bar_url, t="check", repair="true")
1680 self.failUnless("Healthy :" in res)
1681 d.addCallback(_check)
1682 redir_url = "http://allmydata.org/TARGET"
1683 def _check2(statuscode, target):
1684 self.failUnlessEqual(statuscode, str(http.FOUND))
1685 self.failUnlessEqual(target, redir_url)
1686 d.addCallback(lambda res:
1687 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1690 t="check", repair="true",
1691 when_done=redir_url))
1692 d.addCallback(lambda res:
1693 self.POST(bar_url, t="check", return_to=redir_url))
1695 self.failUnless("Healthy :" in res)
1696 self.failUnless("Return to file" in res)
1697 self.failUnless(redir_url in res)
1698 d.addCallback(_check3)
1701 def test_POST_DIRURL_check(self):
1702 foo_url = self.public_url + "/foo/"
1703 d = self.POST(foo_url, t="check")
1705 self.failUnless("Healthy :" in res, res)
1706 d.addCallback(_check)
1707 redir_url = "http://allmydata.org/TARGET"
1708 def _check2(statuscode, target):
1709 self.failUnlessEqual(statuscode, str(http.FOUND))
1710 self.failUnlessEqual(target, redir_url)
1711 d.addCallback(lambda res:
1712 self.shouldRedirect2("test_POST_DIRURL_check",
1716 when_done=redir_url))
1717 d.addCallback(lambda res:
1718 self.POST(foo_url, t="check", return_to=redir_url))
1720 self.failUnless("Healthy :" in res, res)
1721 self.failUnless("Return to file/directory" in res)
1722 self.failUnless(redir_url in res)
1723 d.addCallback(_check3)
1725 d.addCallback(lambda res:
1726 self.POST(foo_url, t="check", output="JSON"))
1727 def _check_json(res):
1728 data = simplejson.loads(res)
1729 self.failUnless("storage-index" in data)
1730 self.failUnless(data["results"]["healthy"])
1731 d.addCallback(_check_json)
1735 def test_POST_DIRURL_check_and_repair(self):
1736 foo_url = self.public_url + "/foo/"
1737 d = self.POST(foo_url, t="check", repair="true")
1739 self.failUnless("Healthy :" in res, res)
1740 d.addCallback(_check)
1741 redir_url = "http://allmydata.org/TARGET"
1742 def _check2(statuscode, target):
1743 self.failUnlessEqual(statuscode, str(http.FOUND))
1744 self.failUnlessEqual(target, redir_url)
1745 d.addCallback(lambda res:
1746 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1749 t="check", repair="true",
1750 when_done=redir_url))
1751 d.addCallback(lambda res:
1752 self.POST(foo_url, t="check", return_to=redir_url))
1754 self.failUnless("Healthy :" in res)
1755 self.failUnless("Return to file/directory" in res)
1756 self.failUnless(redir_url in res)
1757 d.addCallback(_check3)
    def wait_for_operation(self, ignored, ophandle):
        # Poll /operations/<ophandle>?t=status (JSON) until the operation
        # reports finished=True, stalling 1.0s between polls.
        url = "/operations/" + ophandle
        url += "?t=status&output=JSON"
        # NOTE(review): the GET of `url` and the enclosing callback header
        # appear elided here -- `res` below is presumably the GET's body.
            data = simplejson.loads(res)
            if not data["finished"]:
                # not done yet: wait a second and re-poll
                d = self.stall(delay=1.0)
                d.addCallback(self.wait_for_operation, ophandle)
    def get_operation_results(self, ignored, ophandle, output=None):
        # Fetch the results page for /operations/<ophandle>, optionally in
        # a specific format; "json" output is parsed before returning.
        url = "/operations/" + ophandle
        # NOTE(review): some lines are elided here (the `?t=status` suffix
        # and an `if output:` guard, presumably) -- confirm upstream.
        url += "&output=" + output
        if output and output.lower() == "json":
            return simplejson.loads(res)
1787 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1788 d = self.shouldFail2(error.Error,
1789 "test_POST_DIRURL_deepcheck_no_ophandle",
1791 "slow operation requires ophandle=",
1792 self.POST, self.public_url, t="start-deep-check")
1795 def test_POST_DIRURL_deepcheck(self):
1796 def _check_redirect(statuscode, target):
1797 self.failUnlessEqual(statuscode, str(http.FOUND))
1798 self.failUnless(target.endswith("/operations/123"))
1799 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1800 self.POST, self.public_url,
1801 t="start-deep-check", ophandle="123")
1802 d.addCallback(self.wait_for_operation, "123")
1803 def _check_json(data):
1804 self.failUnlessEqual(data["finished"], True)
1805 self.failUnlessEqual(data["count-objects-checked"], 8)
1806 self.failUnlessEqual(data["count-objects-healthy"], 8)
1807 d.addCallback(_check_json)
1808 d.addCallback(self.get_operation_results, "123", "html")
1809 def _check_html(res):
1810 self.failUnless("Objects Checked: <span>8</span>" in res)
1811 self.failUnless("Objects Healthy: <span>8</span>" in res)
1812 d.addCallback(_check_html)
1814 d.addCallback(lambda res:
1815 self.GET("/operations/123/"))
1816 d.addCallback(_check_html) # should be the same as without the slash
1818 d.addCallback(lambda res:
1819 self.shouldFail2(error.Error, "one", "404 Not Found",
1820 "No detailed results for SI bogus",
1821 self.GET, "/operations/123/bogus"))
1823 foo_si = self._foo_node.get_storage_index()
1824 foo_si_s = base32.b2a(foo_si)
1825 d.addCallback(lambda res:
1826 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1827 def _check_foo_json(res):
1828 data = simplejson.loads(res)
1829 self.failUnlessEqual(data["storage-index"], foo_si_s)
1830 self.failUnless(data["results"]["healthy"])
1831 d.addCallback(_check_foo_json)
1834 def test_POST_DIRURL_deepcheck_and_repair(self):
1835 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1836 ophandle="124", output="json", followRedirect=True)
1837 d.addCallback(self.wait_for_operation, "124")
1838 def _check_json(data):
1839 self.failUnlessEqual(data["finished"], True)
1840 self.failUnlessEqual(data["count-objects-checked"], 8)
1841 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1842 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1843 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1844 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1845 self.failUnlessEqual(data["count-repairs-successful"], 0)
1846 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1847 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1848 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1849 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1850 d.addCallback(_check_json)
1851 d.addCallback(self.get_operation_results, "124", "html")
1852 def _check_html(res):
1853 self.failUnless("Objects Checked: <span>8</span>" in res)
1855 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1856 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1857 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1859 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1860 self.failUnless("Repairs Successful: <span>0</span>" in res)
1861 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1863 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1864 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1865 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1866 d.addCallback(_check_html)
1869 def test_POST_FILEURL_bad_t(self):
1870 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1871 "POST to file: bad t=bogus",
1872 self.POST, self.public_url + "/foo/bar.txt",
1876 def test_POST_mkdir(self): # return value?
1877 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1878 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1879 d.addCallback(self.failUnlessNodeKeysAre, [])
1882 def test_POST_mkdir_2(self):
1883 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
1884 d.addCallback(lambda res:
1885 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1886 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1887 d.addCallback(self.failUnlessNodeKeysAre, [])
1890 def test_POST_mkdirs_2(self):
1891 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
1892 d.addCallback(lambda res:
1893 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
1894 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
1895 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
1896 d.addCallback(self.failUnlessNodeKeysAre, [])
1899 def test_POST_mkdir_no_parentdir_noredirect(self):
1900 d = self.POST("/uri?t=mkdir")
1901 def _after_mkdir(res):
1902 uri.DirectoryURI.init_from_string(res)
1903 d.addCallback(_after_mkdir)
1906 def test_POST_mkdir_no_parentdir_redirect(self):
1907 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1908 d.addBoth(self.shouldRedirect, None, statuscode='303')
1909 def _check_target(target):
1910 target = urllib.unquote(target)
1911 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1912 d.addCallback(_check_target)
1915 def test_POST_noparent_bad(self):
1916 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
1917 "/uri accepts only PUT, PUT?t=mkdir, "
1918 "POST?t=upload, and POST?t=mkdir",
1919 self.POST, "/uri?t=bogus")
1922 def test_welcome_page_mkdir_button(self):
1923 # Fetch the welcome page.
1925 def _after_get_welcome_page(res):
1926 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
1927 mo = MKDIR_BUTTON_RE.search(res)
1928 formaction = mo.group(1)
1930 formaname = mo.group(3)
1931 formavalue = mo.group(4)
1932 return (formaction, formt, formaname, formavalue)
1933 d.addCallback(_after_get_welcome_page)
1934 def _after_parse_form(res):
1935 (formaction, formt, formaname, formavalue) = res
1936 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
1937 d.addCallback(_after_parse_form)
1938 d.addBoth(self.shouldRedirect, None, statuscode='303')
1941 def test_POST_mkdir_replace(self): # return value?
1942 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1943 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1944 d.addCallback(self.failUnlessNodeKeysAre, [])
1947 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1948 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1949 d.addBoth(self.shouldFail, error.Error,
1950 "POST_mkdir_no_replace_queryarg",
1952 "There was already a child by that name, and you asked me "
1953 "to not replace it")
1954 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1955 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1958 def test_POST_mkdir_no_replace_field(self): # return value?
1959 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1961 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1963 "There was already a child by that name, and you asked me "
1964 "to not replace it")
1965 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1966 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1969 def test_POST_mkdir_whendone_field(self):
1970 d = self.POST(self.public_url + "/foo",
1971 t="mkdir", name="newdir", when_done="/THERE")
1972 d.addBoth(self.shouldRedirect, "/THERE")
1973 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1974 d.addCallback(self.failUnlessNodeKeysAre, [])
1977 def test_POST_mkdir_whendone_queryarg(self):
1978 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1979 t="mkdir", name="newdir")
1980 d.addBoth(self.shouldRedirect, "/THERE")
1981 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1982 d.addCallback(self.failUnlessNodeKeysAre, [])
1985 def test_POST_bad_t(self):
1986 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1987 "POST to a directory with bad t=BOGUS",
1988 self.POST, self.public_url + "/foo", t="BOGUS")
1991 def test_POST_set_children(self):
1992 contents9, n9, newuri9 = self.makefile(9)
1993 contents10, n10, newuri10 = self.makefile(10)
1994 contents11, n11, newuri11 = self.makefile(11)
1997 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2000 "ctime": 1002777696.7564139,
2001 "mtime": 1002777696.7564139
2004 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2007 "ctime": 1002777696.7564139,
2008 "mtime": 1002777696.7564139
2011 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2014 "ctime": 1002777696.7564139,
2015 "mtime": 1002777696.7564139
2018 }""" % (newuri9, newuri10, newuri11)
2020 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
2022 d = client.getPage(url, method="POST", postdata=reqbody)
2024 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
2025 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
2026 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
2028 d.addCallback(_then)
2029 d.addErrback(self.dump_error)
2032 def test_POST_put_uri(self):
2033 contents, n, newuri = self.makefile(8)
2034 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2035 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2036 d.addCallback(lambda res:
2037 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2041 def test_POST_put_uri_replace(self):
2042 contents, n, newuri = self.makefile(8)
2043 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2044 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2045 d.addCallback(lambda res:
2046 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2050 def test_POST_put_uri_no_replace_queryarg(self):
2051 contents, n, newuri = self.makefile(8)
2052 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2053 name="bar.txt", uri=newuri)
2054 d.addBoth(self.shouldFail, error.Error,
2055 "POST_put_uri_no_replace_queryarg",
2057 "There was already a child by that name, and you asked me "
2058 "to not replace it")
2059 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2060 d.addCallback(self.failUnlessIsBarDotTxt)
2063 def test_POST_put_uri_no_replace_field(self):
2064 contents, n, newuri = self.makefile(8)
2065 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2066 name="bar.txt", uri=newuri)
2067 d.addBoth(self.shouldFail, error.Error,
2068 "POST_put_uri_no_replace_field",
2070 "There was already a child by that name, and you asked me "
2071 "to not replace it")
2072 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2073 d.addCallback(self.failUnlessIsBarDotTxt)
2076 def test_POST_delete(self):
2077 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2078 d.addCallback(lambda res: self._foo_node.list())
2079 def _check(children):
2080 self.failIf(u"bar.txt" in children)
2081 d.addCallback(_check)
2084 def test_POST_rename_file(self):
2085 d = self.POST(self.public_url + "/foo", t="rename",
2086 from_name="bar.txt", to_name='wibble.txt')
2087 d.addCallback(lambda res:
2088 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2089 d.addCallback(lambda res:
2090 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2091 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2092 d.addCallback(self.failUnlessIsBarDotTxt)
2093 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2094 d.addCallback(self.failUnlessIsBarJSON)
2097 def test_POST_rename_file_redundant(self):
2098 d = self.POST(self.public_url + "/foo", t="rename",
2099 from_name="bar.txt", to_name='bar.txt')
2100 d.addCallback(lambda res:
2101 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2102 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2103 d.addCallback(self.failUnlessIsBarDotTxt)
2104 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2105 d.addCallback(self.failUnlessIsBarJSON)
2108 def test_POST_rename_file_replace(self):
2109 # rename a file and replace a directory with it
2110 d = self.POST(self.public_url + "/foo", t="rename",
2111 from_name="bar.txt", to_name='empty')
2112 d.addCallback(lambda res:
2113 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2114 d.addCallback(lambda res:
2115 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2116 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2117 d.addCallback(self.failUnlessIsBarDotTxt)
2118 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2119 d.addCallback(self.failUnlessIsBarJSON)
2122 def test_POST_rename_file_no_replace_queryarg(self):
2123 # rename a file and replace a directory with it
2124 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2125 from_name="bar.txt", to_name='empty')
2126 d.addBoth(self.shouldFail, error.Error,
2127 "POST_rename_file_no_replace_queryarg",
2129 "There was already a child by that name, and you asked me "
2130 "to not replace it")
2131 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2132 d.addCallback(self.failUnlessIsEmptyJSON)
2135 def test_POST_rename_file_no_replace_field(self):
2136 # rename a file and replace a directory with it
2137 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2138 from_name="bar.txt", to_name='empty')
2139 d.addBoth(self.shouldFail, error.Error,
2140 "POST_rename_file_no_replace_field",
2142 "There was already a child by that name, and you asked me "
2143 "to not replace it")
2144 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2145 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the JSON encoding of an empty dirnode:
    a ["dirnode", {...}] pair whose "children" map is empty."""
    parsed = simplejson.loads(res)
    self.failUnlessEqual(parsed[0], "dirnode", parsed)
    self.failUnlessEqual(len(parsed[1]["children"]), 0)
2153 def test_POST_rename_file_slash_fail(self):
2154 d = self.POST(self.public_url + "/foo", t="rename",
2155 from_name="bar.txt", to_name='kirk/spock.txt')
2156 d.addBoth(self.shouldFail, error.Error,
2157 "test_POST_rename_file_slash_fail",
2159 "to_name= may not contain a slash",
2161 d.addCallback(lambda res:
2162 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2165 def test_POST_rename_dir(self):
2166 d = self.POST(self.public_url, t="rename",
2167 from_name="foo", to_name='plunk')
2168 d.addCallback(lambda res:
2169 self.failIfNodeHasChild(self.public_root, u"foo"))
2170 d.addCallback(lambda res:
2171 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2172 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2173 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """Assert that *res* is a PageRedirect Failure and return the
    redirect location.

    If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished with
    that HTTP status code.

    :param res: expected to be a twisted failure.Failure wrapping
        error.PageRedirect; anything else fails the test.
    :param target: optional path (relative to self.webish_url) the
        redirect must point at.
    :param statuscode: optional HTTP status code the redirect must use.
    :param which: label included in failure messages.
    :returns: res.value.location, so callers can chain on the target.
    """
    if not isinstance(res, failure.Failure):
        # Proper conditional expression instead of the fragile
        # `cond and a or b` idiom (which silently misbehaves when the
        # middle operand is falsy).
        to_where = "somewhere" if target is None else ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        realtarget = self.webish_url + target
        self.failUnlessEqual(res.value.location, realtarget,
                             "%s: wrong target" % which)
    return res.value.location
2196 def test_GET_URI_form(self):
2197 base = "/uri?uri=%s" % self._bar_txt_uri
2198 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2199 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2201 d.addBoth(self.shouldRedirect, targetbase)
2202 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2203 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2204 d.addCallback(lambda res: self.GET(base+"&t=json"))
2205 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2206 d.addCallback(self.log, "about to get file by uri")
2207 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2208 d.addCallback(self.failUnlessIsBarDotTxt)
2209 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2210 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2211 followRedirect=True))
2212 d.addCallback(self.failUnlessIsFooJSON)
2213 d.addCallback(self.log, "got dir by uri")
2217 def test_GET_URI_form_bad(self):
2218 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2219 "400 Bad Request", "GET /uri requires uri=",
2223 def test_GET_rename_form(self):
2224 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2225 followRedirect=True)
2227 self.failUnless('name="when_done" value="."' in res, res)
2228 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2229 d.addCallback(_check)
2232 def log(self, res, msg):
2233 #print "MSG: %s RES: %s" % (msg, res)
2237 def test_GET_URI_URL(self):
2238 base = "/uri/%s" % self._bar_txt_uri
2240 d.addCallback(self.failUnlessIsBarDotTxt)
2241 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2242 d.addCallback(self.failUnlessIsBarDotTxt)
2243 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2244 d.addCallback(self.failUnlessIsBarDotTxt)
2247 def test_GET_URI_URL_dir(self):
2248 base = "/uri/%s?t=json" % self._foo_uri
2250 d.addCallback(self.failUnlessIsFooJSON)
2253 def test_GET_URI_URL_missing(self):
2254 base = "/uri/%s" % self._bad_file_uri
2255 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2256 http.GONE, None, "NotEnoughSharesError",
2258 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2259 # here? we must arrange for a download to fail after target.open()
2260 # has been called, and then inspect the response to see that it is
2261 # shorter than we expected.
2264 def test_PUT_DIRURL_uri(self):
2265 d = self.s.create_empty_dirnode()
2267 new_uri = dn.get_uri()
2268 # replace /foo with a new (empty) directory
2269 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2270 d.addCallback(lambda res:
2271 self.failUnlessEqual(res.strip(), new_uri))
2272 d.addCallback(lambda res:
2273 self.failUnlessChildURIIs(self.public_root,
2277 d.addCallback(_made_dir)
2280 def test_PUT_DIRURL_uri_noreplace(self):
2281 d = self.s.create_empty_dirnode()
2283 new_uri = dn.get_uri()
2284 # replace /foo with a new (empty) directory, but ask that
2285 # replace=false, so it should fail
2286 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2287 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2289 self.public_url + "/foo?t=uri&replace=false",
2291 d.addCallback(lambda res:
2292 self.failUnlessChildURIIs(self.public_root,
2296 d.addCallback(_made_dir)
2299 def test_PUT_DIRURL_bad_t(self):
2300 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2301 "400 Bad Request", "PUT to a directory",
2302 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2303 d.addCallback(lambda res:
2304 self.failUnlessChildURIIs(self.public_root,
2309 def test_PUT_NEWFILEURL_uri(self):
2310 contents, n, new_uri = self.makefile(8)
2311 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2312 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2313 d.addCallback(lambda res:
2314 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2318 def test_PUT_NEWFILEURL_uri_replace(self):
2319 contents, n, new_uri = self.makefile(8)
2320 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2321 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2322 d.addCallback(lambda res:
2323 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2327 def test_PUT_NEWFILEURL_uri_no_replace(self):
2328 contents, n, new_uri = self.makefile(8)
2329 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2330 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2332 "There was already a child by that name, and you asked me "
2333 "to not replace it")
2336 def test_PUT_NEWFILE_URI(self):
2337 file_contents = "New file contents here\n"
2338 d = self.PUT("/uri", file_contents)
2340 assert isinstance(uri, str), uri
2341 self.failUnless(uri in FakeCHKFileNode.all_contents)
2342 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2344 return self.GET("/uri/%s" % uri)
2345 d.addCallback(_check)
2347 self.failUnlessEqual(res, file_contents)
2348 d.addCallback(_check2)
2351 def test_PUT_NEWFILE_URI_not_mutable(self):
2352 file_contents = "New file contents here\n"
2353 d = self.PUT("/uri?mutable=false", file_contents)
2355 assert isinstance(uri, str), uri
2356 self.failUnless(uri in FakeCHKFileNode.all_contents)
2357 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2359 return self.GET("/uri/%s" % uri)
2360 d.addCallback(_check)
2362 self.failUnlessEqual(res, file_contents)
2363 d.addCallback(_check2)
2366 def test_PUT_NEWFILE_URI_only_PUT(self):
2367 d = self.PUT("/uri?t=bogus", "")
2368 d.addBoth(self.shouldFail, error.Error,
2369 "PUT_NEWFILE_URI_only_PUT",
2371 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2374 def test_PUT_NEWFILE_URI_mutable(self):
2375 file_contents = "New file contents here\n"
2376 d = self.PUT("/uri?mutable=true", file_contents)
2377 def _check1(filecap):
2378 filecap = filecap.strip()
2379 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2380 self.filecap = filecap
2381 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2382 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2383 n = self.s.create_node_from_uri(filecap)
2384 return n.download_best_version()
2385 d.addCallback(_check1)
2387 self.failUnlessEqual(data, file_contents)
2388 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2389 d.addCallback(_check2)
2391 self.failUnlessEqual(res, file_contents)
2392 d.addCallback(_check3)
2395 def test_PUT_mkdir(self):
2396 d = self.PUT("/uri?t=mkdir", "")
2398 n = self.s.create_node_from_uri(uri.strip())
2399 d2 = self.failUnlessNodeKeysAre(n, [])
2400 d2.addCallback(lambda res:
2401 self.GET("/uri/%s?t=json" % uri))
2403 d.addCallback(_check)
2404 d.addCallback(self.failUnlessIsEmptyJSON)
2407 def test_POST_check(self):
2408 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2410 # this returns a string form of the results, which are probably
2411 # None since we're using fake filenodes.
2412 # TODO: verify that the check actually happened, by changing
2413 # FakeCHKFileNode to count how many times .check() has been
2416 d.addCallback(_done)
2419 def test_bad_method(self):
2420 url = self.webish_url + self.public_url + "/foo/bar.txt"
2421 d = self.shouldHTTPError("test_bad_method",
2422 501, "Not Implemented",
2423 "I don't know how to treat a BOGUS request.",
2424 client.getPage, url, method="BOGUS")
2427 def test_short_url(self):
2428 url = self.webish_url + "/uri"
2429 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2430 "I don't know how to treat a DELETE request.",
2431 client.getPage, url, method="DELETE")
2434 def test_ophandle_bad(self):
2435 url = self.webish_url + "/operations/bogus?t=status"
2436 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2437 "unknown/expired handle 'bogus'",
2438 client.getPage, url)
2441 def test_ophandle_cancel(self):
2442 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2443 followRedirect=True)
2444 d.addCallback(lambda ignored:
2445 self.GET("/operations/128?t=status&output=JSON"))
2447 data = simplejson.loads(res)
2448 self.failUnless("finished" in data, res)
2449 monitor = self.ws.root.child_operations.handles["128"][0]
2450 d = self.POST("/operations/128?t=cancel&output=JSON")
2452 data = simplejson.loads(res)
2453 self.failUnless("finished" in data, res)
2454 # t=cancel causes the handle to be forgotten
2455 self.failUnless(monitor.is_cancelled())
2456 d.addCallback(_check2)
2458 d.addCallback(_check1)
2459 d.addCallback(lambda ignored:
2460 self.shouldHTTPError("test_ophandle_cancel",
2461 404, "404 Not Found",
2462 "unknown/expired handle '128'",
2464 "/operations/128?t=status&output=JSON"))
2467 def test_ophandle_retainfor(self):
2468 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2469 followRedirect=True)
2470 d.addCallback(lambda ignored:
2471 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2473 data = simplejson.loads(res)
2474 self.failUnless("finished" in data, res)
2475 d.addCallback(_check1)
2476 # the retain-for=0 will cause the handle to be expired very soon
2477 d.addCallback(self.stall, 2.0)
2478 d.addCallback(lambda ignored:
2479 self.shouldHTTPError("test_ophandle_retainfor",
2480 404, "404 Not Found",
2481 "unknown/expired handle '129'",
2483 "/operations/129?t=status&output=JSON"))
2486 def test_ophandle_release_after_complete(self):
2487 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2488 followRedirect=True)
2489 d.addCallback(self.wait_for_operation, "130")
2490 d.addCallback(lambda ignored:
2491 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2492 # the release-after-complete=true will cause the handle to be expired
2493 d.addCallback(lambda ignored:
2494 self.shouldHTTPError("test_ophandle_release_after_complete",
2495 404, "404 Not Found",
2496 "unknown/expired handle '130'",
2498 "/operations/130?t=status&output=JSON"))
2501 def test_incident(self):
2502 d = self.POST("/report_incident", details="eek")
2504 self.failUnless("Thank you for your report!" in res, res)
2505 d.addCallback(_done)
2508 def test_static(self):
2509 webdir = os.path.join(self.staticdir, "subdir")
2510 fileutil.make_dirs(webdir)
2511 f = open(os.path.join(webdir, "hello.txt"), "wb")
2515 d = self.GET("/static/subdir/hello.txt")
2517 self.failUnlessEqual(res, "hello")
2518 d.addCallback(_check)
2522 class Util(unittest.TestCase, ShouldFailMixin):
2523 def test_parse_replace_arg(self):
2524 self.failUnlessEqual(common.parse_replace_arg("true"), True)
2525 self.failUnlessEqual(common.parse_replace_arg("false"), False)
2526 self.failUnlessEqual(common.parse_replace_arg("only-files"),
2528 self.shouldFail(AssertionError, "test_parse_replace_arg", "",
2529 common.parse_replace_arg, "only_fles")
def test_abbreviate_time(self):
    """abbreviate_time picks a human unit (s/ms/us) and renders None as ''."""
    cases = [(None, ""),
             (1.234, "1.23s"),
             (0.123, "123ms"),
             (0.00123, "1.2ms"),
             (0.000123, "123us")]
    for value, expected in cases:
        self.failUnlessEqual(common.abbreviate_time(value), expected)
def test_abbreviate_rate(self):
    """abbreviate_rate scales to Bps/kBps/MBps and renders None as ''."""
    cases = [(None, ""),
             (1234000, "1.23MBps"),
             (12340, "12.3kBps"),
             (123, "123Bps")]
    for value, expected in cases:
        self.failUnlessEqual(common.abbreviate_rate(value), expected)
def test_abbreviate_size(self):
    """abbreviate_size scales to B/kB/MB/GB and renders None as ''."""
    cases = [(None, ""),
             (1.23*1000*1000*1000, "1.23GB"),
             (1.23*1000*1000, "1.23MB"),
             (1230, "1.2kB"),
             (123, "123B")]
    for value, expected in cases:
        self.failUnlessEqual(common.abbreviate_size(value), expected)
2551 def test_plural(self):
2553 return "%d second%s" % (s, status.plural(s))
2554 self.failUnlessEqual(convert(0), "0 seconds")
2555 self.failUnlessEqual(convert(1), "1 second")
2556 self.failUnlessEqual(convert(2), "2 seconds")
2558 return "has share%s: %s" % (status.plural(s), ",".join(s))
2559 self.failUnlessEqual(convert2([]), "has shares: ")
2560 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2561 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2564 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """Issue a POST to the stashed file URL for *which*, appending the
    query string *args*; *ign* is the ignored chained-Deferred result."""
    target = "%s?%s" % (self.fileurls[which], args)
    return self.GET(target, method="POST", clientnum=clientnum)
2571 def test_filecheck(self):
2572 self.basedir = "web/Grid/filecheck"
2574 c0 = self.g.clients[0]
2577 d = c0.upload(upload.Data(DATA, convergence=""))
2578 def _stash_uri(ur, which):
2579 self.uris[which] = ur.uri
2580 d.addCallback(_stash_uri, "good")
2581 d.addCallback(lambda ign:
2582 c0.upload(upload.Data(DATA+"1", convergence="")))
2583 d.addCallback(_stash_uri, "sick")
2584 d.addCallback(lambda ign:
2585 c0.upload(upload.Data(DATA+"2", convergence="")))
2586 d.addCallback(_stash_uri, "dead")
2587 def _stash_mutable_uri(n, which):
2588 self.uris[which] = n.get_uri()
2589 assert isinstance(self.uris[which], str)
2590 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2591 d.addCallback(_stash_mutable_uri, "corrupt")
2592 d.addCallback(lambda ign:
2593 c0.upload(upload.Data("literal", convergence="")))
2594 d.addCallback(_stash_uri, "small")
2596 def _compute_fileurls(ignored):
2598 for which in self.uris:
2599 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2600 d.addCallback(_compute_fileurls)
2602 def _clobber_shares(ignored):
2603 good_shares = self.find_shares(self.uris["good"])
2604 self.failUnlessEqual(len(good_shares), 10)
2605 sick_shares = self.find_shares(self.uris["sick"])
2606 os.unlink(sick_shares[0][2])
2607 dead_shares = self.find_shares(self.uris["dead"])
2608 for i in range(1, 10):
2609 os.unlink(dead_shares[i][2])
2610 c_shares = self.find_shares(self.uris["corrupt"])
2611 cso = CorruptShareOptions()
2612 cso.stdout = StringIO()
2613 cso.parseOptions([c_shares[0][2]])
2615 d.addCallback(_clobber_shares)
2617 d.addCallback(self.CHECK, "good", "t=check")
2618 def _got_html_good(res):
2619 self.failUnless("Healthy" in res, res)
2620 self.failIf("Not Healthy" in res, res)
2621 d.addCallback(_got_html_good)
2622 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2623 def _got_html_good_return_to(res):
2624 self.failUnless("Healthy" in res, res)
2625 self.failIf("Not Healthy" in res, res)
2626 self.failUnless('<a href="somewhere">Return to file'
2628 d.addCallback(_got_html_good_return_to)
2629 d.addCallback(self.CHECK, "good", "t=check&output=json")
2630 def _got_json_good(res):
2631 r = simplejson.loads(res)
2632 self.failUnlessEqual(r["summary"], "Healthy")
2633 self.failUnless(r["results"]["healthy"])
2634 self.failIf(r["results"]["needs-rebalancing"])
2635 self.failUnless(r["results"]["recoverable"])
2636 d.addCallback(_got_json_good)
2638 d.addCallback(self.CHECK, "small", "t=check")
2639 def _got_html_small(res):
2640 self.failUnless("Literal files are always healthy" in res, res)
2641 self.failIf("Not Healthy" in res, res)
2642 d.addCallback(_got_html_small)
2643 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2644 def _got_html_small_return_to(res):
2645 self.failUnless("Literal files are always healthy" in res, res)
2646 self.failIf("Not Healthy" in res, res)
2647 self.failUnless('<a href="somewhere">Return to file'
2649 d.addCallback(_got_html_small_return_to)
2650 d.addCallback(self.CHECK, "small", "t=check&output=json")
2651 def _got_json_small(res):
2652 r = simplejson.loads(res)
2653 self.failUnlessEqual(r["storage-index"], "")
2654 self.failUnless(r["results"]["healthy"])
2655 d.addCallback(_got_json_small)
2657 d.addCallback(self.CHECK, "sick", "t=check")
2658 def _got_html_sick(res):
2659 self.failUnless("Not Healthy" in res, res)
2660 d.addCallback(_got_html_sick)
2661 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2662 def _got_json_sick(res):
2663 r = simplejson.loads(res)
2664 self.failUnlessEqual(r["summary"],
2665 "Not Healthy: 9 shares (enc 3-of-10)")
2666 self.failIf(r["results"]["healthy"])
2667 self.failIf(r["results"]["needs-rebalancing"])
2668 self.failUnless(r["results"]["recoverable"])
2669 d.addCallback(_got_json_sick)
2671 d.addCallback(self.CHECK, "dead", "t=check")
2672 def _got_html_dead(res):
2673 self.failUnless("Not Healthy" in res, res)
2674 d.addCallback(_got_html_dead)
2675 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2676 def _got_json_dead(res):
2677 r = simplejson.loads(res)
2678 self.failUnlessEqual(r["summary"],
2679 "Not Healthy: 1 shares (enc 3-of-10)")
2680 self.failIf(r["results"]["healthy"])
2681 self.failIf(r["results"]["needs-rebalancing"])
2682 self.failIf(r["results"]["recoverable"])
2683 d.addCallback(_got_json_dead)
2685 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2686 def _got_html_corrupt(res):
2687 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2688 d.addCallback(_got_html_corrupt)
2689 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2690 def _got_json_corrupt(res):
2691 r = simplejson.loads(res)
2692 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2694 self.failIf(r["results"]["healthy"])
2695 self.failUnless(r["results"]["recoverable"])
2696 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2697 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2698 d.addCallback(_got_json_corrupt)
2700 d.addErrback(self.explain_web_error)
2703 def test_repair_html(self):
2704 self.basedir = "web/Grid/repair_html"
2706 c0 = self.g.clients[0]
2709 d = c0.upload(upload.Data(DATA, convergence=""))
2710 def _stash_uri(ur, which):
2711 self.uris[which] = ur.uri
2712 d.addCallback(_stash_uri, "good")
2713 d.addCallback(lambda ign:
2714 c0.upload(upload.Data(DATA+"1", convergence="")))
2715 d.addCallback(_stash_uri, "sick")
2716 d.addCallback(lambda ign:
2717 c0.upload(upload.Data(DATA+"2", convergence="")))
2718 d.addCallback(_stash_uri, "dead")
2719 def _stash_mutable_uri(n, which):
2720 self.uris[which] = n.get_uri()
2721 assert isinstance(self.uris[which], str)
2722 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2723 d.addCallback(_stash_mutable_uri, "corrupt")
2725 def _compute_fileurls(ignored):
2727 for which in self.uris:
2728 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2729 d.addCallback(_compute_fileurls)
2731 def _clobber_shares(ignored):
2732 good_shares = self.find_shares(self.uris["good"])
2733 self.failUnlessEqual(len(good_shares), 10)
2734 sick_shares = self.find_shares(self.uris["sick"])
2735 os.unlink(sick_shares[0][2])
2736 dead_shares = self.find_shares(self.uris["dead"])
2737 for i in range(1, 10):
2738 os.unlink(dead_shares[i][2])
2739 c_shares = self.find_shares(self.uris["corrupt"])
2740 cso = CorruptShareOptions()
2741 cso.stdout = StringIO()
2742 cso.parseOptions([c_shares[0][2]])
2744 d.addCallback(_clobber_shares)
2746 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2747 def _got_html_good(res):
2748 self.failUnless("Healthy" in res, res)
2749 self.failIf("Not Healthy" in res, res)
2750 self.failUnless("No repair necessary" in res, res)
2751 d.addCallback(_got_html_good)
2753 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2754 def _got_html_sick(res):
2755 self.failUnless("Healthy : healthy" in res, res)
2756 self.failIf("Not Healthy" in res, res)
2757 self.failUnless("Repair successful" in res, res)
2758 d.addCallback(_got_html_sick)
2760 # repair of a dead file will fail, of course, but it isn't yet
2761 # clear how this should be reported. Right now it shows up as
2764 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2765 #def _got_html_dead(res):
2767 # self.failUnless("Healthy : healthy" in res, res)
2768 # self.failIf("Not Healthy" in res, res)
2769 # self.failUnless("No repair necessary" in res, res)
2770 #d.addCallback(_got_html_dead)
2772 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2773 def _got_html_corrupt(res):
2774 self.failUnless("Healthy : Healthy" in res, res)
2775 self.failIf("Not Healthy" in res, res)
2776 self.failUnless("Repair successful" in res, res)
2777 d.addCallback(_got_html_corrupt)
2779 d.addErrback(self.explain_web_error)
2782 def test_repair_json(self):
2783 self.basedir = "web/Grid/repair_json"
2785 c0 = self.g.clients[0]
2788 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2789 def _stash_uri(ur, which):
2790 self.uris[which] = ur.uri
2791 d.addCallback(_stash_uri, "sick")
2793 def _compute_fileurls(ignored):
2795 for which in self.uris:
2796 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2797 d.addCallback(_compute_fileurls)
2799 def _clobber_shares(ignored):
2800 sick_shares = self.find_shares(self.uris["sick"])
2801 os.unlink(sick_shares[0][2])
2802 d.addCallback(_clobber_shares)
2804 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2805 def _got_json_sick(res):
2806 r = simplejson.loads(res)
2807 self.failUnlessEqual(r["repair-attempted"], True)
2808 self.failUnlessEqual(r["repair-successful"], True)
2809 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2810 "Not Healthy: 9 shares (enc 3-of-10)")
2811 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2812 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2813 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2814 d.addCallback(_got_json_sick)
2816 d.addErrback(self.explain_web_error)
2819 def test_unknown(self):
2820 self.basedir = "web/Grid/unknown"
2822 c0 = self.g.clients[0]
2826 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
2827 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
2828 # the future cap format may contain slashes, which must be tolerated
2829 expected_info_url = "uri/%s?t=info" % urllib.quote(future_writecap,
2831 future_node = UnknownNode(future_writecap, future_readcap)
2833 d = c0.create_empty_dirnode()
2834 def _stash_root_and_create_file(n):
2836 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
2837 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
2838 return self.rootnode.set_node(u"future", future_node)
2839 d.addCallback(_stash_root_and_create_file)
2840 # make sure directory listing tolerates unknown nodes
2841 d.addCallback(lambda ign: self.GET(self.rooturl))
2842 def _check_html(res):
2843 self.failUnlessIn("<td>future</td>", res)
2844 # find the More Info link for "future", should be relative
2845 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
2846 info_url = mo.group(1)
2847 self.failUnlessEqual(info_url, "future?t=info")
2849 d.addCallback(_check_html)
2850 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
2851 def _check_json(res, expect_writecap):
2852 data = simplejson.loads(res)
2853 self.failUnlessEqual(data[0], "dirnode")
2854 f = data[1]["children"]["future"]
2855 self.failUnlessEqual(f[0], "unknown")
2857 self.failUnlessEqual(f[1]["rw_uri"], future_writecap)
2859 self.failIfIn("rw_uri", f[1])
2860 self.failUnlessEqual(f[1]["ro_uri"], future_readcap)
2861 self.failUnless("metadata" in f[1])
2862 d.addCallback(_check_json, expect_writecap=True)
2863 d.addCallback(lambda ign: self.GET(expected_info_url))
2864 def _check_info(res, expect_readcap):
2865 self.failUnlessIn("Object Type: <span>unknown</span>", res)
2866 self.failUnlessIn(future_writecap, res)
2868 self.failUnlessIn(future_readcap, res)
2869 self.failIfIn("Raw data as", res)
2870 self.failIfIn("Directory writecap", res)
2871 self.failIfIn("Checker Operations", res)
2872 self.failIfIn("Mutable File Operations", res)
2873 self.failIfIn("Directory Operations", res)
2874 d.addCallback(_check_info, expect_readcap=False)
2875 d.addCallback(lambda ign: self.GET(self.rooturl+"future?t=info"))
2876 d.addCallback(_check_info, expect_readcap=True)
2878 # and make sure that a read-only version of the directory can be
2879 # rendered too. This version will not have future_writecap
2880 d.addCallback(lambda ign: self.GET(self.rourl))
2881 d.addCallback(_check_html)
2882 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
2883 d.addCallback(_check_json, expect_writecap=False)
2886 def test_deep_check(self):
2887 self.basedir = "web/Grid/deep_check"
2889 c0 = self.g.clients[0]
2893 d = c0.create_empty_dirnode()
2894 def _stash_root_and_create_file(n):
2896 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2897 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2898 d.addCallback(_stash_root_and_create_file)
2899 def _stash_uri(fn, which):
2900 self.uris[which] = fn.get_uri()
2902 d.addCallback(_stash_uri, "good")
2903 d.addCallback(lambda ign:
2904 self.rootnode.add_file(u"small",
2905 upload.Data("literal",
2907 d.addCallback(_stash_uri, "small")
2908 d.addCallback(lambda ign:
2909 self.rootnode.add_file(u"sick",
2910 upload.Data(DATA+"1",
2912 d.addCallback(_stash_uri, "sick")
2914 # this tests that deep-check and stream-manifest will ignore
2915 # UnknownNode instances. Hopefully this will also cover deep-stats.
2916 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
2917 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
2918 future_node = UnknownNode(future_writecap, future_readcap)
2919 d.addCallback(lambda ign: self.rootnode.set_node(u"future",future_node))
2921 def _clobber_shares(ignored):
2922 self.delete_shares_numbered(self.uris["sick"], [0,1])
2923 d.addCallback(_clobber_shares)
2931 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
2934 units = [simplejson.loads(line)
2935 for line in res.splitlines()
2938 print "response is:", res
2939 print "undecodeable line was '%s'" % line
2941 self.failUnlessEqual(len(units), 5+1)
2942 # should be parent-first
2944 self.failUnlessEqual(u0["path"], [])
2945 self.failUnlessEqual(u0["type"], "directory")
2946 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2947 u0cr = u0["check-results"]
2948 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
2950 ugood = [u for u in units
2951 if u["type"] == "file" and u["path"] == [u"good"]][0]
2952 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2953 ugoodcr = ugood["check-results"]
2954 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
2957 self.failUnlessEqual(stats["type"], "stats")
2959 self.failUnlessEqual(s["count-immutable-files"], 2)
2960 self.failUnlessEqual(s["count-literal-files"], 1)
2961 self.failUnlessEqual(s["count-directories"], 1)
2962 self.failUnlessEqual(s["count-unknown"], 1)
2963 d.addCallback(_done)
2965 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2966 def _check_manifest(res):
2967 self.failUnless(res.endswith("\n"))
2968 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
2969 self.failUnlessEqual(len(units), 5+1)
2970 self.failUnlessEqual(units[-1]["type"], "stats")
2972 self.failUnlessEqual(first["path"], [])
2973 self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
2974 self.failUnlessEqual(first["type"], "directory")
2975 stats = units[-1]["stats"]
2976 self.failUnlessEqual(stats["count-immutable-files"], 2)
2977 self.failUnlessEqual(stats["count-literal-files"], 1)
2978 self.failUnlessEqual(stats["count-mutable-files"], 0)
2979 self.failUnlessEqual(stats["count-immutable-files"], 2)
2980 self.failUnlessEqual(stats["count-unknown"], 1)
2981 d.addCallback(_check_manifest)
2983 # now add root/subdir and root/subdir/grandchild, then make subdir
2984 # unrecoverable, then see what happens
2986 d.addCallback(lambda ign:
2987 self.rootnode.create_empty_directory(u"subdir"))
2988 d.addCallback(_stash_uri, "subdir")
2989 d.addCallback(lambda subdir_node:
2990 subdir_node.add_file(u"grandchild",
2991 upload.Data(DATA+"2",
2993 d.addCallback(_stash_uri, "grandchild")
2995 d.addCallback(lambda ign:
2996 self.delete_shares_numbered(self.uris["subdir"],
3004 # root/subdir [unrecoverable]
3005 # root/subdir/grandchild
3007 # how should a streaming-JSON API indicate fatal error?
3008 # answer: emit ERROR: instead of a JSON string
3010 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3011 def _check_broken_manifest(res):
3012 lines = res.splitlines()
3014 for (i,line) in enumerate(lines)
3015 if line.startswith("ERROR:")]
3017 self.fail("no ERROR: in output: %s" % (res,))
3018 first_error = error_lines[0]
3019 error_line = lines[first_error]
3020 error_msg = lines[first_error+1:]
3021 error_msg_s = "\n".join(error_msg) + "\n"
3022 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3024 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3025 units = [simplejson.loads(line) for line in lines[:first_error]]
3026 self.failUnlessEqual(len(units), 6) # includes subdir
3027 last_unit = units[-1]
3028 self.failUnlessEqual(last_unit["path"], ["subdir"])
3029 d.addCallback(_check_broken_manifest)
3031 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3032 def _check_broken_deepcheck(res):
3033 lines = res.splitlines()
3035 for (i,line) in enumerate(lines)
3036 if line.startswith("ERROR:")]
3038 self.fail("no ERROR: in output: %s" % (res,))
3039 first_error = error_lines[0]
3040 error_line = lines[first_error]
3041 error_msg = lines[first_error+1:]
3042 error_msg_s = "\n".join(error_msg) + "\n"
3043 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3045 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3046 units = [simplejson.loads(line) for line in lines[:first_error]]
3047 self.failUnlessEqual(len(units), 6) # includes subdir
3048 last_unit = units[-1]
3049 self.failUnlessEqual(last_unit["path"], ["subdir"])
3050 r = last_unit["check-results"]["results"]
3051 self.failUnlessEqual(r["count-recoverable-versions"], 0)
3052 self.failUnlessEqual(r["count-shares-good"], 1)
3053 self.failUnlessEqual(r["recoverable"], False)
3054 d.addCallback(_check_broken_deepcheck)
3056 d.addErrback(self.explain_web_error)
3059 def test_deep_check_and_repair(self):
3060 self.basedir = "web/Grid/deep_check_and_repair"
3062 c0 = self.g.clients[0]
3066 d = c0.create_empty_dirnode()
3067 def _stash_root_and_create_file(n):
3069 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3070 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3071 d.addCallback(_stash_root_and_create_file)
3072 def _stash_uri(fn, which):
3073 self.uris[which] = fn.get_uri()
3074 d.addCallback(_stash_uri, "good")
3075 d.addCallback(lambda ign:
3076 self.rootnode.add_file(u"small",
3077 upload.Data("literal",
3079 d.addCallback(_stash_uri, "small")
3080 d.addCallback(lambda ign:
3081 self.rootnode.add_file(u"sick",
3082 upload.Data(DATA+"1",
3084 d.addCallback(_stash_uri, "sick")
3085 #d.addCallback(lambda ign:
3086 # self.rootnode.add_file(u"dead",
3087 # upload.Data(DATA+"2",
3089 #d.addCallback(_stash_uri, "dead")
3091 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3092 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3093 #d.addCallback(_stash_uri, "corrupt")
3095 def _clobber_shares(ignored):
3096 good_shares = self.find_shares(self.uris["good"])
3097 self.failUnlessEqual(len(good_shares), 10)
3098 sick_shares = self.find_shares(self.uris["sick"])
3099 os.unlink(sick_shares[0][2])
3100 #dead_shares = self.find_shares(self.uris["dead"])
3101 #for i in range(1, 10):
3102 # os.unlink(dead_shares[i][2])
3104 #c_shares = self.find_shares(self.uris["corrupt"])
3105 #cso = CorruptShareOptions()
3106 #cso.stdout = StringIO()
3107 #cso.parseOptions([c_shares[0][2]])
3109 d.addCallback(_clobber_shares)
3112 # root/good CHK, 10 shares
3114 # root/sick CHK, 9 shares
3116 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
3118 units = [simplejson.loads(line)
3119 for line in res.splitlines()
3121 self.failUnlessEqual(len(units), 4+1)
3122 # should be parent-first
3124 self.failUnlessEqual(u0["path"], [])
3125 self.failUnlessEqual(u0["type"], "directory")
3126 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3127 u0crr = u0["check-and-repair-results"]
3128 self.failUnlessEqual(u0crr["repair-attempted"], False)
3129 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3131 ugood = [u for u in units
3132 if u["type"] == "file" and u["path"] == [u"good"]][0]
3133 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3134 ugoodcrr = ugood["check-and-repair-results"]
3135 self.failUnlessEqual(u0crr["repair-attempted"], False)
3136 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3138 usick = [u for u in units
3139 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3140 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3141 usickcrr = usick["check-and-repair-results"]
3142 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3143 self.failUnlessEqual(usickcrr["repair-successful"], True)
3144 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3145 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3148 self.failUnlessEqual(stats["type"], "stats")
3150 self.failUnlessEqual(s["count-immutable-files"], 2)
3151 self.failUnlessEqual(s["count-literal-files"], 1)
3152 self.failUnlessEqual(s["count-directories"], 1)
3153 d.addCallback(_done)
3155 d.addErrback(self.explain_web_error)
3158 def _count_leases(self, ignored, which):
3159 u = self.uris[which]
3160 shares = self.find_shares(u)
3162 for shnum, serverid, fn in shares:
3163 sf = get_share_file(fn)
3164 num_leases = len(list(sf.get_leases()))
3165 lease_counts.append( (fn, num_leases) )
3168 def _assert_leasecount(self, lease_counts, expected):
3169 for (fn, num_leases) in lease_counts:
3170 if num_leases != expected:
3171 self.fail("expected %d leases, have %d, on %s" %
3172 (expected, num_leases, fn))
3174 def test_add_lease(self):
3175 self.basedir = "web/Grid/add_lease"
3176 self.set_up_grid(num_clients=2)
3177 c0 = self.g.clients[0]
3180 d = c0.upload(upload.Data(DATA, convergence=""))
3181 def _stash_uri(ur, which):
3182 self.uris[which] = ur.uri
3183 d.addCallback(_stash_uri, "one")
3184 d.addCallback(lambda ign:
3185 c0.upload(upload.Data(DATA+"1", convergence="")))
3186 d.addCallback(_stash_uri, "two")
3187 def _stash_mutable_uri(n, which):
3188 self.uris[which] = n.get_uri()
3189 assert isinstance(self.uris[which], str)
3190 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3191 d.addCallback(_stash_mutable_uri, "mutable")
3193 def _compute_fileurls(ignored):
3195 for which in self.uris:
3196 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3197 d.addCallback(_compute_fileurls)
3199 d.addCallback(self._count_leases, "one")
3200 d.addCallback(self._assert_leasecount, 1)
3201 d.addCallback(self._count_leases, "two")
3202 d.addCallback(self._assert_leasecount, 1)
3203 d.addCallback(self._count_leases, "mutable")
3204 d.addCallback(self._assert_leasecount, 1)
3206 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3207 def _got_html_good(res):
3208 self.failUnless("Healthy" in res, res)
3209 self.failIf("Not Healthy" in res, res)
3210 d.addCallback(_got_html_good)
3212 d.addCallback(self._count_leases, "one")
3213 d.addCallback(self._assert_leasecount, 1)
3214 d.addCallback(self._count_leases, "two")
3215 d.addCallback(self._assert_leasecount, 1)
3216 d.addCallback(self._count_leases, "mutable")
3217 d.addCallback(self._assert_leasecount, 1)
3219 # this CHECK uses the original client, which uses the same
3220 # lease-secrets, so it will just renew the original lease
3221 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3222 d.addCallback(_got_html_good)
3224 d.addCallback(self._count_leases, "one")
3225 d.addCallback(self._assert_leasecount, 1)
3226 d.addCallback(self._count_leases, "two")
3227 d.addCallback(self._assert_leasecount, 1)
3228 d.addCallback(self._count_leases, "mutable")
3229 d.addCallback(self._assert_leasecount, 1)
3231 # this CHECK uses an alternate client, which adds a second lease
3232 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3233 d.addCallback(_got_html_good)
3235 d.addCallback(self._count_leases, "one")
3236 d.addCallback(self._assert_leasecount, 2)
3237 d.addCallback(self._count_leases, "two")
3238 d.addCallback(self._assert_leasecount, 1)
3239 d.addCallback(self._count_leases, "mutable")
3240 d.addCallback(self._assert_leasecount, 1)
3242 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3243 d.addCallback(_got_html_good)
3245 d.addCallback(self._count_leases, "one")
3246 d.addCallback(self._assert_leasecount, 2)
3247 d.addCallback(self._count_leases, "two")
3248 d.addCallback(self._assert_leasecount, 1)
3249 d.addCallback(self._count_leases, "mutable")
3250 d.addCallback(self._assert_leasecount, 1)
3252 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3254 d.addCallback(_got_html_good)
3256 d.addCallback(self._count_leases, "one")
3257 d.addCallback(self._assert_leasecount, 2)
3258 d.addCallback(self._count_leases, "two")
3259 d.addCallback(self._assert_leasecount, 1)
3260 d.addCallback(self._count_leases, "mutable")
3261 d.addCallback(self._assert_leasecount, 2)
3263 d.addErrback(self.explain_web_error)
3266 def test_deep_add_lease(self):
3267 self.basedir = "web/Grid/deep_add_lease"
3268 self.set_up_grid(num_clients=2)
3269 c0 = self.g.clients[0]
3273 d = c0.create_empty_dirnode()
3274 def _stash_root_and_create_file(n):
3276 self.uris["root"] = n.get_uri()
3277 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3278 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3279 d.addCallback(_stash_root_and_create_file)
3280 def _stash_uri(fn, which):
3281 self.uris[which] = fn.get_uri()
3282 d.addCallback(_stash_uri, "one")
3283 d.addCallback(lambda ign:
3284 self.rootnode.add_file(u"small",
3285 upload.Data("literal",
3287 d.addCallback(_stash_uri, "small")
3289 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3290 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3291 d.addCallback(_stash_uri, "mutable")
3293 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
3295 units = [simplejson.loads(line)
3296 for line in res.splitlines()
3298 # root, one, small, mutable, stats
3299 self.failUnlessEqual(len(units), 4+1)
3300 d.addCallback(_done)
3302 d.addCallback(self._count_leases, "root")
3303 d.addCallback(self._assert_leasecount, 1)
3304 d.addCallback(self._count_leases, "one")
3305 d.addCallback(self._assert_leasecount, 1)
3306 d.addCallback(self._count_leases, "mutable")
3307 d.addCallback(self._assert_leasecount, 1)
3309 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3310 d.addCallback(_done)
3312 d.addCallback(self._count_leases, "root")
3313 d.addCallback(self._assert_leasecount, 1)
3314 d.addCallback(self._count_leases, "one")
3315 d.addCallback(self._assert_leasecount, 1)
3316 d.addCallback(self._count_leases, "mutable")
3317 d.addCallback(self._assert_leasecount, 1)
3319 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3321 d.addCallback(_done)
3323 d.addCallback(self._count_leases, "root")
3324 d.addCallback(self._assert_leasecount, 2)
3325 d.addCallback(self._count_leases, "one")
3326 d.addCallback(self._assert_leasecount, 2)
3327 d.addCallback(self._count_leases, "mutable")
3328 d.addCallback(self._assert_leasecount, 2)
3330 d.addErrback(self.explain_web_error)
3334 def test_exceptions(self):
3335 self.basedir = "web/Grid/exceptions"
3336 self.set_up_grid(num_clients=1, num_servers=2)
3337 c0 = self.g.clients[0]
3340 d = c0.create_empty_dirnode()
3342 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3343 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3345 d.addCallback(_stash_root)
3346 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
3348 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3349 self.delete_shares_numbered(ur.uri, range(1,10))
3351 u = uri.from_string(ur.uri)
3352 u.key = testutil.flip_bit(u.key, 0)
3353 baduri = u.to_string()
3354 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3355 d.addCallback(_stash_bad)
3356 d.addCallback(lambda ign: c0.create_empty_dirnode())
3357 def _mangle_dirnode_1share(n):
3359 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3360 self.fileurls["dir-1share-json"] = url + "?t=json"
3361 self.delete_shares_numbered(u, range(1,10))
3362 d.addCallback(_mangle_dirnode_1share)
3363 d.addCallback(lambda ign: c0.create_empty_dirnode())
3364 def _mangle_dirnode_0share(n):
3366 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3367 self.fileurls["dir-0share-json"] = url + "?t=json"
3368 self.delete_shares_numbered(u, range(0,10))
3369 d.addCallback(_mangle_dirnode_0share)
3371 # NotEnoughSharesError should be reported sensibly, with a
3372 # text/plain explanation of the problem, and perhaps some
3373 # information on which shares *could* be found.
3375 d.addCallback(lambda ignored:
3376 self.shouldHTTPError("GET unrecoverable",
3377 410, "Gone", "NoSharesError",
3378 self.GET, self.fileurls["0shares"]))
3379 def _check_zero_shares(body):
3380 self.failIf("<html>" in body, body)
3381 body = " ".join(body.strip().split())
3382 exp = ("NoSharesError: no shares could be found. "
3383 "Zero shares usually indicates a corrupt URI, or that "
3384 "no servers were connected, but it might also indicate "
3385 "severe corruption. You should perform a filecheck on "
3386 "this object to learn more. The full error message is: "
3387 "Failed to get enough shareholders: have 0, need 3")
3388 self.failUnlessEqual(exp, body)
3389 d.addCallback(_check_zero_shares)
3392 d.addCallback(lambda ignored:
3393 self.shouldHTTPError("GET 1share",
3394 410, "Gone", "NotEnoughSharesError",
3395 self.GET, self.fileurls["1share"]))
3396 def _check_one_share(body):
3397 self.failIf("<html>" in body, body)
3398 body = " ".join(body.strip().split())
3399 exp = ("NotEnoughSharesError: This indicates that some "
3400 "servers were unavailable, or that shares have been "
3401 "lost to server departure, hard drive failure, or disk "
3402 "corruption. You should perform a filecheck on "
3403 "this object to learn more. The full error message is:"
3404 " Failed to get enough shareholders: have 1, need 3")
3405 self.failUnlessEqual(exp, body)
3406 d.addCallback(_check_one_share)
3408 d.addCallback(lambda ignored:
3409 self.shouldHTTPError("GET imaginary",
3410 404, "Not Found", None,
3411 self.GET, self.fileurls["imaginary"]))
3412 def _missing_child(body):
3413 self.failUnless("No such child: imaginary" in body, body)
3414 d.addCallback(_missing_child)
3416 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3417 def _check_0shares_dir_html(body):
3418 self.failUnless("<html>" in body, body)
3419 # we should see the regular page, but without the child table or
3421 body = " ".join(body.strip().split())
3422 self.failUnlessIn('href="?t=info">More info on this directory',
3424 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3425 "could not be retrieved, because there were insufficient "
3426 "good shares. This might indicate that no servers were "
3427 "connected, insufficient servers were connected, the URI "
3428 "was corrupt, or that shares have been lost due to server "
3429 "departure, hard drive failure, or disk corruption. You "
3430 "should perform a filecheck on this object to learn more.")
3431 self.failUnlessIn(exp, body)
3432 self.failUnlessIn("No upload forms: directory is unreadable", body)
3433 d.addCallback(_check_0shares_dir_html)
3435 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3436 def _check_1shares_dir_html(body):
3437 # at some point, we'll split UnrecoverableFileError into 0-shares
3438 # and some-shares like we did for immutable files (since there
3439 # are different sorts of advice to offer in each case). For now,
3440 # they present the same way.
3441 self.failUnless("<html>" in body, body)
3442 body = " ".join(body.strip().split())
3443 self.failUnlessIn('href="?t=info">More info on this directory',
3445 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3446 "could not be retrieved, because there were insufficient "
3447 "good shares. This might indicate that no servers were "
3448 "connected, insufficient servers were connected, the URI "
3449 "was corrupt, or that shares have been lost due to server "
3450 "departure, hard drive failure, or disk corruption. You "
3451 "should perform a filecheck on this object to learn more.")
3452 self.failUnlessIn(exp, body)
3453 self.failUnlessIn("No upload forms: directory is unreadable", body)
3454 d.addCallback(_check_1shares_dir_html)
3456 d.addCallback(lambda ignored:
3457 self.shouldHTTPError("GET dir-0share-json",
3458 410, "Gone", "UnrecoverableFileError",
3460 self.fileurls["dir-0share-json"]))
3461 def _check_unrecoverable_file(body):
3462 self.failIf("<html>" in body, body)
3463 body = " ".join(body.strip().split())
3464 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3465 "could not be retrieved, because there were insufficient "
3466 "good shares. This might indicate that no servers were "
3467 "connected, insufficient servers were connected, the URI "
3468 "was corrupt, or that shares have been lost due to server "
3469 "departure, hard drive failure, or disk corruption. You "
3470 "should perform a filecheck on this object to learn more.")
3471 self.failUnlessEqual(exp, body)
3472 d.addCallback(_check_unrecoverable_file)
3474 d.addCallback(lambda ignored:
3475 self.shouldHTTPError("GET dir-1share-json",
3476 410, "Gone", "UnrecoverableFileError",
3478 self.fileurls["dir-1share-json"]))
3479 d.addCallback(_check_unrecoverable_file)
3481 d.addCallback(lambda ignored:
3482 self.shouldHTTPError("GET imaginary",
3483 404, "Not Found", None,
3484 self.GET, self.fileurls["imaginary"]))
3486 # attach a webapi child that throws a random error, to test how it
3488 w = c0.getServiceNamed("webish")
3489 w.root.putChild("ERRORBOOM", ErrorBoom())
3491 d.addCallback(lambda ignored:
3492 self.shouldHTTPError("GET errorboom_html",
3493 500, "Internal Server Error", None,
3494 self.GET, "ERRORBOOM"))
3495 def _internal_error_html(body):
3496 # test that a weird exception during a webapi operation with
3497 # Accept:*/* results in a text/html stack trace, while one
3498 # without that Accept: line gets us a text/plain stack trace
3499 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3500 d.addCallback(_internal_error_html)
3502 d.addCallback(lambda ignored:
3503 self.shouldHTTPError("GET errorboom_text",
3504 500, "Internal Server Error", None,
3505 self.GET, "ERRORBOOM",
3506 headers={"accept": ["text/plain"]}))
3507 def _internal_error_text(body):
3508 # test that a weird exception during a webapi operation with
3509 # Accept:*/* results in a text/html stack trace, while one
3510 # without that Accept: line gets us a text/plain stack trace
3511 self.failIf("<html>" in body, body)
3512 self.failUnless(body.startswith("Traceback "), body)
3513 d.addCallback(_internal_error_text)
3515 def _flush_errors(res):
3516 # Trial: please ignore the CompletelyUnhandledError in the logs
3517 self.flushLoggedErrors(CompletelyUnhandledError)
3519 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """An exception type that no webapi error handler recognizes; raised by
    ErrorBoom to exercise the generic internal-server-error rendering path."""
class ErrorBoom(rend.Page):
    """A web resource that always fails: rendering raises an exception type
    that no handler knows about, forcing a 500 Internal Server Error."""
    def beforeRender(self, ctx):
        # beforeRender runs before any output is produced, so every request
        # to this resource fails server-side
        raise CompletelyUnhandledError("whoops")