1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.unknown import UnknownNode
15 from allmydata.web import status, common
16 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
17 from allmydata.util import fileutil, base32
18 from allmydata.util.assertutil import precondition
19 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
20 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
21 from allmydata.interfaces import IURI, INewDirectoryURI, \
22 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
23 from allmydata.mutable import servermap, publish, retrieve
24 import common_util as testutil
25 from allmydata.test.no_network import GridTestMixin
27 from allmydata.test.common_web import HTTPClientGETFactory, \
30 # create a fake uploader/downloader, and a couple of fake dirnodes, then
31 # create a webserver that works against them
# trial per-test timeout (seconds); deliberately generous because these
# tests are slow on low-powered hardware.
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
class FakeStatsProvider:
    # Stand-in for the node's stats provider: empty stats and counters, so
    # status/statistics pages can render without a real node.
    # NOTE(review): a line is elided here in this view; in the full file this
    # dict may live inside a get_stats() method -- confirm against trunk.
    stats = {'stats': {}, 'counters': {}}
class FakeClient(service.MultiService):
    # Stand-in for a Tahoe client node: provides just enough of the client
    # interface (status lists, storage broker, node factories, upload) for
    # the webish-server tests to run without a real grid.
    # NOTE(review): this view of the file is elided; several lines inside the
    # methods below (dict entries, callback 'def' lines, 'return' lines) are
    # missing and should be confirmed against trunk.
    nodeid = "fake_nodeid"
    nickname = "fake_nickname"
    basedir = "fake_basedir"
    def get_versions(self):
        # fake version map, rendered on the welcome page
        # NOTE(review): the remaining entries of this dict are elided here.
        return {'allmydata': "fake",
    introducer_furl = "None"
    # one canned status object of each kind, so the /status pages have
    # something to render
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    convergence = "some random string"
    stats_provider = FakeStatsProvider()
    def connected_to_introducer(self):
    storage_broker = StorageFarmBroker(None, permute_peers=True)
    def get_storage_broker(self):
        return self.storage_broker
    def create_node_from_uri(self, auri, readcap=None):
        # Map a cap string to the matching fake node class.
        # NOTE(review): 'readcap' is unused in the visible lines -- confirm.
        precondition(isinstance(auri, str), auri)
        u = uri.from_string(auri)
        if (INewDirectoryURI.providedBy(u)
            or IReadonlyNewDirectoryURI.providedBy(u)):
            return FakeDirectoryNode(self).init_from_uri(u)
        if IFileURI.providedBy(u):
            return FakeCHKFileNode(u, self)
        # anything else must be a mutable-file cap
        assert IMutableFileURI.providedBy(u), u
        return FakeMutableFileNode(self).init_from_uri(u)
    def create_empty_dirnode(self):
        n = FakeDirectoryNode(self)
        d.addCallback(lambda res: n)
    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
    def create_mutable_file(self, contents=""):
        n = FakeMutableFileNode(self)
        return n.create(contents)
    def upload(self, uploadable):
        # Fake immutable upload: read the whole uploadable, store it in a
        # fake CHK filenode, and report its URI via UploadResults.
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
            # NOTE(review): the enclosing 'def _got_data(data):' is elided.
            n = create_chk_filenode(self, data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    # accessors for the canned status lists above
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
class WebMixin(object):
    # Shared fixture: starts a FakeClient plus a WebishServer on an
    # ephemeral port, and builds a small directory tree
    # (public/foo/bar.txt, public/foo/sub/baz.txt, public/reedownlee/nor,
    # ...) for the tests to poke at over HTTP.
    # NOTE(review): elided view -- the 'def setUp(self):' line and the big
    # callback's 'def' line are missing below; confirm against trunk.
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        s.setServiceParent(self.s)
        # port "0" means ephemeral: ask the listener what it actually bound
        self.webish_port = port = s.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port
        # six empty dirnodes: public root, private root, foo, empty, sub, rodir
        l = [ self.s.create_empty_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri())
            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()
            foo.set_uri(u"empty", res[3][1].get_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri)
            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri)
            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file)
            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file)
            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"
            # capture bar.txt's metadata so tests can compare ctime/mtime
            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
191 def makefile(self, number):
192 contents = "contents of file %s\n" % number
193 n = create_chk_filenode(self.s, contents)
194 return contents, n, n.get_uri()
        # tearDown (its 'def' line is elided in this view): stop the fake
        # client service, which also tears down the webish server.
        return self.s.stopService()
199 def failUnlessIsBarDotTxt(self, res):
200 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
202 def failUnlessIsBarJSON(self, res):
203 data = simplejson.loads(res)
204 self.failUnless(isinstance(data, list))
205 self.failUnlessEqual(data[0], u"filenode")
206 self.failUnless(isinstance(data[1], dict))
207 self.failIf(data[1]["mutable"])
208 self.failIf("rw_uri" in data[1]) # immutable
209 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
210 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
211 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # Assert that *res* is the t=json description of the mutable 'foo'
        # directory, including its expected children and their metadata.
        # NOTE(review): elided view -- the 'for (name, value)' line of the
        # comprehension and the final expected value are missing below.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
    def GET(self, urlpath, followRedirect=False, return_response=False,
        # Issue an HTTP GET against the test webserver; fires with the body.
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        # NOTE(review): elided view -- the '**kwargs):' continuation line and
        # the '_got_data' def line are missing here.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred

    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        # NOTE(review): unlike GET, this hands the bare urlpath (not
        # self.webish_url + urlpath) to the factory -- verify intentional.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
273 def PUT(self, urlpath, data, **kwargs):
274 url = self.webish_url + urlpath
275 return client.getPage(url, method="PUT", postdata=data, **kwargs)
277 def DELETE(self, urlpath):
278 url = self.webish_url + urlpath
279 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Issue a multipart/form-data POST of **fields to urlpath. A tuple
        # value is treated as (filename, file-contents); unicode values are
        # encoded as UTF-8.
        # NOTE(review): elided view -- the 'form = [...]' initialization, the
        # 'else:' before the plain Content-Disposition branch, and the
        # per-field body appends are missing below.
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
                form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            assert isinstance(value, str)
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # Errback-style checker: pass only if *res* is a Failure wrapping
        # expected_failure, optionally containing substring /
        # response_substring. Otherwise fail the test, labeled *which*.
        # NOTE(review): elided view -- the 'if substring:' guard and the
        # 'else:' before self.fail() are missing below.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
                self.failUnless(substring in str(res),
                                "substring '%s' not in '%s'"
                                % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def shouldFail2(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
        # Like shouldFail, but invokes *callable* itself and checks the
        # resulting Deferred.
        # NOTE(review): the 'response_substring,' parameter line is elided in
        # this view, though the body clearly uses it -- confirm signature.
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                    response_substring, res.value.response))
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))

    def should404(self, res, which):
        # Pass only if *res* is an HTTP 404 error.Error Failure.
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            # twisted.web error.Error.status is a string, hence "404"
            self.failUnlessEqual(res.value.status, "404")
            self.fail("%s was supposed to Error(404), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    # NOTE(review): elided view -- callback 'def' lines, 'return d' lines and
    # some dict entries are missing throughout this class; the added comments
    # describe only what is visible.
    def test_create(self):
    def test_welcome(self):
            # root page must render the welcome banner
            self.failUnless('Welcome To TahoeLAFS' in res, res)

        self.s.basedir = 'web/test_welcome'
        fileutil.make_dirs("web/test_welcome")
        fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)

    def test_provisioning(self):
        # exercise the /provisioning form with two parameter sets plus an
        # almost-empty one
        d = self.GET("/provisioning/")
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)
            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)

    def test_reliability_tool(self):
        # the reliability page needs NumPy; skip when it can't be imported
        from allmydata import reliability
        _hush_pyflakes = reliability
        raise unittest.SkipTest("reliability tool requires NumPy")

        d = self.GET("/reliability/")
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      "check_period": "1M",
                      "report_period": "3M",
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)

    def test_status(self):
        # each canned status object's counter number must show up as an
        # anchor on the /status page, and each per-operation page must render
        dl_num = self.s.list_all_download_statuses()[0].get_counter()
        ul_num = self.s.list_all_upload_statuses()[0].get_counter()
        mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = self.s.list_all_publish_statuses()[0].get_counter()
        ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)

        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
505 def test_status_numbers(self):
506 drrm = status.DownloadResultsRendererMixin()
507 self.failUnlessEqual(drrm.render_time(None, None), "")
508 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
509 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
510 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
511 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
512 self.failUnlessEqual(drrm.render_rate(None, None), "")
513 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
514 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
515 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
517 urrm = status.UploadResultsRendererMixin()
518 self.failUnlessEqual(urrm.render_time(None, None), "")
519 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
520 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
521 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
522 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
523 self.failUnlessEqual(urrm.render_rate(None, None), "")
524 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
525 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
526 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        # plain GET of a file returns its full contents
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_range(self):
        # satisfiable Range request: 206 Partial Content + Content-Range
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])

    def test_GET_FILEURL_partial_range(self):
        # open-ended range ("bytes=5-") covers through end-of-file
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])

    def test_HEAD_FILEURL_range(self):
        # HEAD with a range: same status/headers as GET, but empty body
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))

    def test_GET_FILEURL_range_bad(self):
        # a malformed Range header must be rejected
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",

    def test_HEAD_FILEURL(self):
        # HEAD: empty body, but content-length/content-type reflect the file
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
    def test_GET_FILEURL_named(self):
        # /file/$CAP/@@name=/NAME and /named/$CAP/... serve the file under a
        # chosen filename; ?save=true requests a save-to-disk disposition
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers

    def test_PUT_FILEURL_named_bad(self):
        # /file is read-only: PUT must be rejected
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")

    def test_GET_DIRURL_named_bad(self):
        # a directory cap under /file is an error
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             self.GET, base + "/@@name=/blah.txt")

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             "/file must be followed by a file-cap and a name",

    def test_GET_unhandled_URI_named(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "is not a valid file- or directory- cap",

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             "is not a valid file- or directory- cap",

    def test_GET_FILE_URI(self):
        # a bare /uri/$FILECAP GET returns the file
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        # NOTE(review): the 'which' label below says GET; it should probably
        # read "test_PUT_FILE_URI_badchild" -- confirm and fix upstream.
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,

    def test_GET_FILEURL_save(self):
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
    def test_PUT_NEWFILEURL(self):
        # PUT to a new name creates an immutable child with those contents
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_not_mutable(self):
        # mutable=false behaves the same as the default (immutable) PUT
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_range_bad(self):
        # Content-Range on PUT is unsupported and must not create the child
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))

    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
            # the response body is the new file's writecap
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        # SDMF files are limited to a single segment
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))

    def test_PUT_NEWFILEURL_replace(self):
        # PUT to an existing name replaces the child by default
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",

    def test_PUT_NEWFILEURL_no_replace(self):
        # replace=false must refuse to overwrite an existing child
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  "There was already a child by that name, and you asked me "

    def test_PUT_NEWFILEURL_mkdirs(self):
        # PUT through a missing intermediate directory creates it
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_blocked(self):
        # a file in the path blocks implicit directory creation
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  "Unable to create directory 'blockingfile': a file was in the way")
    def test_DELETE_FILEURL(self):
        # DELETE removes the child from its parent directory
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")

    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")

    def test_GET_FILEURL_uri(self):
        # t=uri and t=readonly-uri both return the file's cap string
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 self.public_url + "/foo/bar.txt?t=bogus")

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
    def test_GET_DIRURL(self):
        # Render the directory listing page and sanity-check its HTML.
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        # NOTE(review): elided view -- the ROOT assignment, callback 'def'
        # lines, and the for-loop's 'break'/'else:' are missing below.
        d = self.GET(self.public_url + "/foo", followRedirect=True)
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                # NOTE(review): TestCase.fail() takes a single msg argument;
                # the extra 'res' here would raise TypeError if this branch
                # ever runs -- confirm and fix upstream.
                self.fail("unable to find delete-bar.txt line", res)

            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td>DIR</td>')
                       +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)

        # look at a directory which is readonly
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
            self.failUnless("(read-only)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)

        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
            self.failUnless(re.search('<td>DIR-RO</td>'
                                      r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)

        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
            self.failUnless("directory is empty" in res, res)
            MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
            self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)

    def test_GET_DIRURL_badtype(self):
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 self.public_url + "/foo?t=bogus")

    def test_GET_DIRURL_json(self):
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
947 def test_POST_DIRURL_manifest_no_ophandle(self):
948 d = self.shouldFail2(error.Error,
949 "test_POST_DIRURL_manifest_no_ophandle",
951 "slow operation requires ophandle=",
952 self.POST, self.public_url, t="start-manifest")
# Start a deep-manifest operation on /foo with ophandle=125, wait for it to
# finish, then fetch the results in each output format (default HTML, "html",
# "text", "JSON") and check each one for the expected entries (the 'sub'
# directory and 'sub/baz.txt' with their caps).
955 def test_POST_DIRURL_manifest(self):
956 d = defer.succeed(None)
957 def getman(ignored, output):
958 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
960 d.addCallback(self.wait_for_operation, "125")
961 d.addCallback(self.get_operation_results, "125", output)
963 d.addCallback(getman, None)
964 def _got_html(manifest):
965 self.failUnless("Manifest of SI=" in manifest)
966 self.failUnless("<td>sub</td>" in manifest)
967 self.failUnless(self._sub_uri in manifest)
968 self.failUnless("<td>sub/baz.txt</td>" in manifest)
969 d.addCallback(_got_html)
971 # both t=status and unadorned GET should be identical
972 d.addCallback(lambda res: self.GET("/operations/125"))
973 d.addCallback(_got_html)
975 d.addCallback(getman, "html")
976 d.addCallback(_got_html)
977 d.addCallback(getman, "text")
978 def _got_text(manifest):
979 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
980 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
981 d.addCallback(_got_text)
982 d.addCallback(getman, "JSON")
# JSON form: res["manifest"] is a list of (path_list, cap) pairs; also check
# the bookkeeping keys are present.
984 data = res["manifest"]
986 for (path_list, cap) in data:
987 got[tuple(path_list)] = cap
988 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
989 self.failUnless((u"sub",u"baz.txt") in got)
990 self.failUnless("finished" in res)
991 self.failUnless("origin" in res)
992 self.failUnless("storage-index" in res)
993 self.failUnless("verifycaps" in res)
994 self.failUnless("stats" in res)
995 d.addCallback(_got_json)
# t=start-deep-size without ophandle= must fail the same way as manifest above.
998 def test_POST_DIRURL_deepsize_no_ophandle(self):
999 d = self.shouldFail2(error.Error,
1000 "test_POST_DIRURL_deepsize_no_ophandle",
1002 "slow operation requires ophandle=",
1003 self.POST, self.public_url, t="start-deep-size")
# Start a deep-size operation (ophandle=126), wait for completion, then check
# both the JSON results and the "size: NNN" line in the text output. The exact
# size varies with directory encoding, so only a lower bound is asserted.
1006 def test_POST_DIRURL_deepsize(self):
1007 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1008 followRedirect=True)
1009 d.addCallback(self.wait_for_operation, "126")
1010 d.addCallback(self.get_operation_results, "126", "json")
1011 def _got_json(data):
1012 self.failUnlessEqual(data["finished"], True)
1014 self.failUnless(size > 1000)
1015 d.addCallback(_got_json)
1016 d.addCallback(self.get_operation_results, "126", "text")
1018 mo = re.search(r'^size: (\d+)$', res, re.M)
1019 self.failUnless(mo, res)
1020 size = int(mo.group(1))
1021 # with directories, the size varies.
1022 self.failUnless(size > 1000)
1023 d.addCallback(_got_text)
# t=start-deep-stats without ophandle= must also be rejected.
1026 def test_POST_DIRURL_deepstats_no_ophandle(self):
1027 d = self.shouldFail2(error.Error,
1028 "test_POST_DIRURL_deepstats_no_ophandle",
1030 "slow operation requires ophandle=",
1031 self.POST, self.public_url, t="start-deep-stats")
# Start a deep-stats operation (ophandle=127) and compare the resulting JSON
# counters against the known contents of the test directory tree. Counters
# whose values vary run-to-run (directory sizes) are deliberately commented out.
1034 def test_POST_DIRURL_deepstats(self):
1035 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1036 followRedirect=True)
1037 d.addCallback(self.wait_for_operation, "127")
1038 d.addCallback(self.get_operation_results, "127", "json")
1039 def _got_json(stats):
1040 expected = {"count-immutable-files": 3,
1041 "count-mutable-files": 0,
1042 "count-literal-files": 0,
1044 "count-directories": 3,
1045 "size-immutable-files": 57,
1046 "size-literal-files": 0,
1047 #"size-directories": 1912, # varies
1048 #"largest-directory": 1590,
1049 "largest-directory-children": 5,
1050 "largest-immutable-file": 19,
1052 for k,v in expected.iteritems():
1053 self.failUnlessEqual(stats[k], v,
1054 "stats[%s] was %s, not %s" %
1056 self.failUnlessEqual(stats["size-files-histogram"],
1058 d.addCallback(_got_json)
# t=stream-manifest returns newline-separated JSON units: one per object
# visited (7 for this tree) ending with a "stats" unit. The first unit is the
# root directory itself; 'sub/baz.txt' must appear with non-null caps.
1061 def test_POST_DIRURL_stream_manifest(self):
1062 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1064 self.failUnless(res.endswith("\n"))
1065 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1066 self.failUnlessEqual(len(units), 7)
1067 self.failUnlessEqual(units[-1]["type"], "stats")
1069 self.failUnlessEqual(first["path"], [])
1070 self.failUnlessEqual(first["cap"], self._foo_uri)
1071 self.failUnlessEqual(first["type"], "directory")
1072 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1073 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1074 self.failIfEqual(baz["storage-index"], None)
1075 self.failIfEqual(baz["verifycap"], None)
1076 self.failIfEqual(baz["repaircap"], None)
1078 d.addCallback(_check)
# GET dirurl?t=uri returns the directory's read-write URI verbatim.
1081 def test_GET_DIRURL_uri(self):
1082 d = self.GET(self.public_url + "/foo?t=uri")
1084 self.failUnlessEqual(res, self._foo_uri)
1085 d.addCallback(_check)
# GET dirurl?t=readonly-uri returns the directory's read-only URI.
1088 def test_GET_DIRURL_readonly_uri(self):
1089 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1091 self.failUnlessEqual(res, self._foo_readonly_uri)
1092 d.addCallback(_check)
# PUT newdir?t=mkdir creates an empty child directory under /foo.
1095 def test_PUT_NEWDIRURL(self):
1096 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1097 d.addCallback(lambda res:
1098 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1099 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1100 d.addCallback(self.failUnlessNodeKeysAre, [])
# PUT t=mkdir on an existing directory ('sub') must succeed without clobbering
# its contents: baz.txt must still be there afterwards.
1103 def test_PUT_NEWDIRURL_exists(self):
1104 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1105 d.addCallback(lambda res:
1106 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1107 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1108 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir through a path where a file ('bar.txt') is in the way must fail with
# 409 Conflict, and the existing 'sub' directory must be left untouched.
1111 def test_PUT_NEWDIRURL_blocked(self):
1112 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1113 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1115 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1116 d.addCallback(lambda res:
1117 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1118 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1119 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Exercise t=mkdir-p: create 'mkp' via POST, then create the nested path
# /sub1/sub2 under it with a single mkdir-p request; the returned URI must
# match the node actually linked at mkp/sub1/sub2, and a repeat request must
# be idempotent (same URI back).
1122 def test_PUT_NEWDIRURL_mkdir_p(self):
1123 d = defer.succeed(None)
1124 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1125 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1126 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1127 def mkdir_p(mkpnode):
1128 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1130 def made_subsub(ssuri):
1131 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1132 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1134 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1136 d.addCallback(made_subsub)
1138 d.addCallback(mkdir_p)
# PUT t=mkdir with intermediate missing directories: creates foo/subdir/newdir,
# so 'subdir' appears under foo (and 'newdir' only under subdir, empty).
1141 def test_PUT_NEWDIRURL_mkdirs(self):
1142 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1143 d.addCallback(lambda res:
1144 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1145 d.addCallback(lambda res:
1146 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1147 d.addCallback(lambda res:
1148 self._foo_node.get_child_at_path(u"subdir/newdir"))
1149 d.addCallback(self.failUnlessNodeKeysAre, [])
# DELETE on a directory URL unlinks it from its parent.
1152 def test_DELETE_DIRURL(self):
1153 d = self.DELETE(self.public_url + "/foo")
1154 d.addCallback(lambda res:
1155 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent child returns 404 and leaves the parent intact.
1158 def test_DELETE_DIRURL_missing(self):
1159 d = self.DELETE(self.public_url + "/foo/missing")
1160 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1161 d.addCallback(lambda res:
1162 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent top-level directory also returns 404.
1165 def test_DELETE_DIRURL_missing2(self):
1166 d = self.DELETE(self.public_url + "/missing")
1167 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debugging helper: walk the whole public root with DirnodeWalkerMixin,
# printing/visiting each child. Not used by assertions.
1170 def dump_root(self):
1172 w = webish.DirnodeWalkerMixin()
1173 def visitor(childpath, childnode, metadata):
1175 d = w.walk(self.public_root, visitor)
# Assert that a dirnode's children are exactly expected_keys (order-insensitive).
# Keys must be unicode, matching dirnode child-name conventions.
1178 def failUnlessNodeKeysAre(self, node, expected_keys):
1179 for k in expected_keys:
1180 assert isinstance(k, unicode)
1182 def _check(children):
1183 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1184 d.addCallback(_check)
# Assert that the dirnode has a child with the given (unicode) name.
1186 def failUnlessNodeHasChild(self, node, name):
1187 assert isinstance(name, unicode)
1189 def _check(children):
1190 self.failUnless(name in children)
1191 d.addCallback(_check)
# Assert that the dirnode does NOT have a child with the given (unicode) name.
1193 def failIfNodeHasChild(self, node, name):
1194 assert isinstance(name, unicode)
1196 def _check(children):
1197 self.failIf(name in children)
1198 d.addCallback(_check)
# Download the (immutable) child at 'name' and assert its bytes match
# expected_contents.
1201 def failUnlessChildContentsAre(self, node, name, expected_contents):
1202 assert isinstance(name, unicode)
1203 d = node.get_child_at_path(name)
1204 d.addCallback(lambda node: node.download_to_data())
1205 def _check(contents):
1206 self.failUnlessEqual(contents, expected_contents)
1207 d.addCallback(_check)
# Same as failUnlessChildContentsAre, but for mutable files: fetch the best
# recoverable version via download_best_version().
1210 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1211 assert isinstance(name, unicode)
1212 d = node.get_child_at_path(name)
1213 d.addCallback(lambda node: node.download_best_version())
1214 def _check(contents):
1215 self.failUnlessEqual(contents, expected_contents)
1216 d.addCallback(_check)
# Assert that the child's URI equals expected_uri (stripped of whitespace).
1219 def failUnlessChildURIIs(self, node, name, expected_uri):
1220 assert isinstance(name, unicode)
1221 d = node.get_child_at_path(name)
1223 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1224 d.addCallback(_check)
# Inverse of failUnlessChildURIIs: assert a URI returned by the web API
# (got_uri, possibly with trailing whitespace) matches the linked child's URI.
1227 def failUnlessURIMatchesChild(self, got_uri, node, name):
1228 assert isinstance(name, unicode)
1229 d = node.get_child_at_path(name)
1231 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1232 d.addCallback(_check)
# Assert that the fake CHK store maps got_uri to exactly these contents.
1235 def failUnlessCHKURIHasContents(self, got_uri, contents):
1236 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
# POST t=upload to a directory: the new child 'new.txt' must be linked with
# the returned URI and contain NEWFILE_CONTENTS.
1238 def test_POST_upload(self):
1239 d = self.POST(self.public_url + "/foo", t="upload",
1240 file=("new.txt", self.NEWFILE_CONTENTS))
1242 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1243 d.addCallback(lambda res:
1244 self.failUnlessChildContentsAre(fn, u"new.txt",
1245 self.NEWFILE_CONTENTS))
# Upload a file whose name contains a non-ASCII character (e-acute); the
# child must be retrievable both via the dirnode API and via a GET on the
# UTF-8-encoded URL.
1248 def test_POST_upload_unicode(self):
1249 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1250 d = self.POST(self.public_url + "/foo", t="upload",
1251 file=(filename, self.NEWFILE_CONTENTS))
1253 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1254 d.addCallback(lambda res:
1255 self.failUnlessChildContentsAre(fn, filename,
1256 self.NEWFILE_CONTENTS))
1257 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1258 d.addCallback(lambda res: self.GET(target_url))
1259 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1260 self.NEWFILE_CONTENTS,
# Same as test_POST_upload_unicode, but the unicode name comes from the
# name= form field, which overrides the filename in the file= part.
1264 def test_POST_upload_unicode_named(self):
1265 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1266 d = self.POST(self.public_url + "/foo", t="upload",
1268 file=("overridden", self.NEWFILE_CONTENTS))
1270 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1271 d.addCallback(lambda res:
1272 self.failUnlessChildContentsAre(fn, filename,
1273 self.NEWFILE_CONTENTS))
1274 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1275 d.addCallback(lambda res: self.GET(target_url))
1276 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1277 self.NEWFILE_CONTENTS,
# POST t=upload to /uri (no parent directory): returns an "Upload Results"
# page; scrape the URI out of it and verify the CHK store holds the contents.
1281 def test_POST_upload_no_link(self):
1282 d = self.POST("/uri", t="upload",
1283 file=("new.txt", self.NEWFILE_CONTENTS))
1284 def _check_upload_results(page):
1285 # this should be a page which describes the results of the upload
1286 # that just finished.
1287 self.failUnless("Upload Results:" in page)
1288 self.failUnless("URI:" in page)
1289 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1290 mo = uri_re.search(page)
1291 self.failUnless(mo, page)
1292 new_uri = mo.group(1)
1294 d.addCallback(_check_upload_results)
1295 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done=/ must redirect to "/" after an unlinked upload.
1298 def test_POST_upload_no_link_whendone(self):
1299 d = self.POST("/uri", t="upload", when_done="/",
1300 file=("new.txt", self.NEWFILE_CONTENTS))
1301 d.addBoth(self.shouldRedirect, "/")
# Helper: invoke callable(*args, **kwargs) and require that it redirects
# (raises error.PageRedirect); pass the status code and Location target to
# 'checker'. If no redirect happened the test fails with a descriptive message.
1304 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1305 d = defer.maybeDeferred(callable, *args, **kwargs)
1307 if isinstance(res, failure.Failure):
1308 res.trap(error.PageRedirect)
1309 statuscode = res.value.status
1310 target = res.value.location
1311 return checker(statuscode, target)
1312 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done may contain %(uri)s interpolation: the redirect target must embed
# the new file's URI, and fetching that target returns the uploaded contents.
1317 def test_POST_upload_no_link_whendone_results(self):
1318 def check(statuscode, target):
1319 self.failUnlessEqual(statuscode, str(http.FOUND))
1320 self.failUnless(target.startswith(self.webish_url), target)
1321 return client.getPage(target, method="GET")
1322 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1324 self.POST, "/uri", t="upload",
1325 when_done="/uri/%(uri)s",
1326 file=("new.txt", self.NEWFILE_CONTENTS))
1327 d.addCallback(lambda res:
1328 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
# Unlinked upload with mutable=true: the returned URI must be a mutable-file
# URI present in the fake mutable store, and the contents must be retrievable
# through the node API, /uri/<cap>, and /file/<cap>.
1331 def test_POST_upload_no_link_mutable(self):
1332 d = self.POST("/uri", t="upload", mutable="true",
1333 file=("new.txt", self.NEWFILE_CONTENTS))
1334 def _check(new_uri):
1335 new_uri = new_uri.strip()
1336 self.new_uri = new_uri
1338 self.failUnless(IMutableFileURI.providedBy(u))
1339 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
1340 n = self.s.create_node_from_uri(new_uri)
1341 return n.download_best_version()
1342 d.addCallback(_check)
1344 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1345 return self.GET("/uri/%s" % urllib.quote(self.new_uri))
1346 d.addCallback(_check2)
1348 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1349 return self.GET("/file/%s" % urllib.quote(self.new_uri))
1350 d.addCallback(_check3)
1352 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1353 d.addCallback(_check4)
# An unlinked mutable upload one byte over MUTABLE_SIZELIMIT must be rejected
# with 413 Request Entity Too Large (SDMF single-segment limit).
1356 def test_POST_upload_no_link_mutable_toobig(self):
1357 d = self.shouldFail2(error.Error,
1358 "test_POST_upload_no_link_mutable_toobig",
1359 "413 Request Entity Too Large",
1360 "SDMF is limited to one segment, and 10001 > 10000",
1362 "/uri", t="upload", mutable="true",
1364 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# End-to-end exercise of a mutable file created through the directory upload
# form: create it, overwrite it via POST and via PUT (URI must stay stable),
# check the HTML and JSON directory listings, the file's JSON, t=uri /
# t=readonly-uri, direct /uri/<cap> access, HEAD headers, and finally the
# oversize-overwrite error path.
1367 def test_POST_upload_mutable(self):
1368 # this creates a mutable file
1369 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1370 file=("new.txt", self.NEWFILE_CONTENTS))
1372 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1373 d.addCallback(lambda res:
1374 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1375 self.NEWFILE_CONTENTS))
1376 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1378 self.failUnless(IMutableFileNode.providedBy(newnode))
1379 self.failUnless(newnode.is_mutable())
1380 self.failIf(newnode.is_readonly())
# remember the node/URI so later steps can check that overwrites keep them stable
1381 self._mutable_node = newnode
1382 self._mutable_uri = newnode.get_uri()
1385 # now upload it again and make sure that the URI doesn't change
1386 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1387 d.addCallback(lambda res:
1388 self.POST(self.public_url + "/foo", t="upload",
1390 file=("new.txt", NEWER_CONTENTS)))
1391 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1392 d.addCallback(lambda res:
1393 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1395 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1397 self.failUnless(IMutableFileNode.providedBy(newnode))
1398 self.failUnless(newnode.is_mutable())
1399 self.failIf(newnode.is_readonly())
1400 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1401 d.addCallback(_got2)
1403 # upload a second time, using PUT instead of POST
1404 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1405 d.addCallback(lambda res:
1406 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1407 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1408 d.addCallback(lambda res:
1409 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1412 # finally list the directory, since mutable files are displayed
1413 # slightly differently
1415 d.addCallback(lambda res:
1416 self.GET(self.public_url + "/foo/",
1417 followRedirect=True))
1418 def _check_page(res):
1419 # TODO: assert more about the contents
1420 self.failUnless("SSK" in res)
1422 d.addCallback(_check_page)
1424 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1426 self.failUnless(IMutableFileNode.providedBy(newnode))
1427 self.failUnless(newnode.is_mutable())
1428 self.failIf(newnode.is_readonly())
1429 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1430 d.addCallback(_got3)
1432 # look at the JSON form of the enclosing directory
1433 d.addCallback(lambda res:
1434 self.GET(self.public_url + "/foo/?t=json",
1435 followRedirect=True))
1436 def _check_page_json(res):
1437 parsed = simplejson.loads(res)
1438 self.failUnlessEqual(parsed[0], "dirnode")
1439 children = dict( [(unicode(name),value)
1441 in parsed[1]["children"].iteritems()] )
1442 self.failUnless("new.txt" in children)
1443 new_json = children["new.txt"]
1444 self.failUnlessEqual(new_json[0], "filenode")
1445 self.failUnless(new_json[1]["mutable"])
1446 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1447 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1448 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1449 d.addCallback(_check_page_json)
1451 # and the JSON form of the file
1452 d.addCallback(lambda res:
1453 self.GET(self.public_url + "/foo/new.txt?t=json"))
1454 def _check_file_json(res):
1455 parsed = simplejson.loads(res)
1456 self.failUnlessEqual(parsed[0], "filenode")
1457 self.failUnless(parsed[1]["mutable"])
1458 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1459 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1460 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1461 d.addCallback(_check_file_json)
1463 # and look at t=uri and t=readonly-uri
1464 d.addCallback(lambda res:
1465 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1466 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1467 d.addCallback(lambda res:
1468 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1469 def _check_ro_uri(res):
1470 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1471 self.failUnlessEqual(res, ro_uri)
1472 d.addCallback(_check_ro_uri)
1474 # make sure we can get to it from /uri/URI
1475 d.addCallback(lambda res:
1476 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1477 d.addCallback(lambda res:
1478 self.failUnlessEqual(res, NEW2_CONTENTS))
1480 # and that HEAD computes the size correctly
1481 d.addCallback(lambda res:
1482 self.HEAD(self.public_url + "/foo/new.txt",
1483 return_response=True))
1484 def _got_headers((res, status, headers)):
1485 self.failUnlessEqual(res, "")
1486 self.failUnlessEqual(headers["content-length"][0],
1487 str(len(NEW2_CONTENTS)))
1488 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1489 d.addCallback(_got_headers)
1491 # make sure that size errors are displayed correctly for overwrite
1492 d.addCallback(lambda res:
1493 self.shouldFail2(error.Error,
1494 "test_POST_upload_mutable-toobig",
1495 "413 Request Entity Too Large",
1496 "SDMF is limited to one segment, and 10001 > 10000",
1498 self.public_url + "/foo", t="upload",
1501 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1504 d.addErrback(self.dump_error)
# Linked mutable upload over MUTABLE_SIZELIMIT must also get 413.
1507 def test_POST_upload_mutable_toobig(self):
1508 d = self.shouldFail2(error.Error,
1509 "test_POST_upload_no_link_mutable_toobig",
1510 "413 Request Entity Too Large",
1511 "SDMF is limited to one segment, and 10001 > 10000",
1513 self.public_url + "/foo",
1514 t="upload", mutable="true",
1516 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1519 def dump_error(self, f):
1520 # if the web server returns an error code (like 400 Bad Request),
1521 # web.client.getPage puts the HTTP response body into the .response
1522 # attribute of the exception object that it gives back. It does not
1523 # appear in the Failure's repr(), so the ERROR that trial displays
1524 # will be rather terse and unhelpful. addErrback this method to the
1525 # end of your chain to get more information out of these errors.
1526 if f.check(error.Error):
1527 print "web.error.Error:"
1529 print f.value.response
# Uploading over an existing child ('bar.txt') replaces it by default.
1532 def test_POST_upload_replace(self):
1533 d = self.POST(self.public_url + "/foo", t="upload",
1534 file=("bar.txt", self.NEWFILE_CONTENTS))
1536 d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
1537 d.addCallback(lambda res:
1538 self.failUnlessChildContentsAre(fn, u"bar.txt",
1539 self.NEWFILE_CONTENTS))
# replace=false is fine when the child ('new.txt') does not already exist.
1542 def test_POST_upload_no_replace_ok(self):
1543 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1544 file=("new.txt", self.NEWFILE_CONTENTS))
1545 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1546 d.addCallback(lambda res: self.failUnlessEqual(res,
1547 self.NEWFILE_CONTENTS))
# replace=false as a query argument on an existing child must fail, leaving
# the original bar.txt contents in place.
1550 def test_POST_upload_no_replace_queryarg(self):
1551 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1552 file=("bar.txt", self.NEWFILE_CONTENTS))
1553 d.addBoth(self.shouldFail, error.Error,
1554 "POST_upload_no_replace_queryarg",
1556 "There was already a child by that name, and you asked me "
1557 "to not replace it")
1558 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1559 d.addCallback(self.failUnlessIsBarDotTxt)
# Same check with replace=false supplied as a form field instead of a query arg.
1562 def test_POST_upload_no_replace_field(self):
1563 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1564 file=("bar.txt", self.NEWFILE_CONTENTS))
1565 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1567 "There was already a child by that name, and you asked me "
1568 "to not replace it")
1569 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1570 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done=/THERE after a linked upload redirects there; file is still linked.
1573 def test_POST_upload_whendone(self):
1574 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1575 file=("new.txt", self.NEWFILE_CONTENTS))
1576 d.addBoth(self.shouldRedirect, "/THERE")
1578 d.addCallback(lambda res:
1579 self.failUnlessChildContentsAre(fn, u"new.txt",
1580 self.NEWFILE_CONTENTS))
# Upload with an explicit name= field and bare file contents (no filename).
1583 def test_POST_upload_named(self):
1585 d = self.POST(self.public_url + "/foo", t="upload",
1586 name="new.txt", file=self.NEWFILE_CONTENTS)
1587 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1588 d.addCallback(lambda res:
1589 self.failUnlessChildContentsAre(fn, u"new.txt",
1590 self.NEWFILE_CONTENTS))
# A name= containing slashes is rejected, and the directory contents must be
# unchanged afterwards (the expected-keys list here is truncated in this listing).
1593 def test_POST_upload_named_badfilename(self):
1594 d = self.POST(self.public_url + "/foo", t="upload",
1595 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1596 d.addBoth(self.shouldFail, error.Error,
1597 "test_POST_upload_named_badfilename",
1599 "name= may not contain a slash",
1601 # make sure that nothing was added
1602 d.addCallback(lambda res:
1603 self.failUnlessNodeKeysAre(self._foo_node,
1604 [u"bar.txt", u"blockingfile",
1605 u"empty", u"n\u00fc.txt",
# t=check on a file: plain POST renders "Healthy :", when_done= redirects,
# return_to= adds a "Return to file" link, and output=JSON yields a healthy
# results dict with a storage-index.
1609 def test_POST_FILEURL_check(self):
1610 bar_url = self.public_url + "/foo/bar.txt"
1611 d = self.POST(bar_url, t="check")
1613 self.failUnless("Healthy :" in res)
1614 d.addCallback(_check)
1615 redir_url = "http://allmydata.org/TARGET"
1616 def _check2(statuscode, target):
1617 self.failUnlessEqual(statuscode, str(http.FOUND))
1618 self.failUnlessEqual(target, redir_url)
1619 d.addCallback(lambda res:
1620 self.shouldRedirect2("test_POST_FILEURL_check",
1624 when_done=redir_url))
1625 d.addCallback(lambda res:
1626 self.POST(bar_url, t="check", return_to=redir_url))
1628 self.failUnless("Healthy :" in res)
1629 self.failUnless("Return to file" in res)
1630 self.failUnless(redir_url in res)
1631 d.addCallback(_check3)
1633 d.addCallback(lambda res:
1634 self.POST(bar_url, t="check", output="JSON"))
1635 def _check_json(res):
1636 data = simplejson.loads(res)
1637 self.failUnless("storage-index" in data)
1638 self.failUnless(data["results"]["healthy"])
1639 d.addCallback(_check_json)
# t=check&repair=true on a file: same Healthy/redirect/return_to checks as
# plain t=check, via the check-and-repair code path.
1643 def test_POST_FILEURL_check_and_repair(self):
1644 bar_url = self.public_url + "/foo/bar.txt"
1645 d = self.POST(bar_url, t="check", repair="true")
1647 self.failUnless("Healthy :" in res)
1648 d.addCallback(_check)
1649 redir_url = "http://allmydata.org/TARGET"
1650 def _check2(statuscode, target):
1651 self.failUnlessEqual(statuscode, str(http.FOUND))
1652 self.failUnlessEqual(target, redir_url)
1653 d.addCallback(lambda res:
1654 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1657 t="check", repair="true",
1658 when_done=redir_url))
1659 d.addCallback(lambda res:
1660 self.POST(bar_url, t="check", return_to=redir_url))
1662 self.failUnless("Healthy :" in res)
1663 self.failUnless("Return to file" in res)
1664 self.failUnless(redir_url in res)
1665 d.addCallback(_check3)
# t=check on a directory: mirrors test_POST_FILEURL_check but the return_to
# link reads "Return to file/directory".
1668 def test_POST_DIRURL_check(self):
1669 foo_url = self.public_url + "/foo/"
1670 d = self.POST(foo_url, t="check")
1672 self.failUnless("Healthy :" in res, res)
1673 d.addCallback(_check)
1674 redir_url = "http://allmydata.org/TARGET"
1675 def _check2(statuscode, target):
1676 self.failUnlessEqual(statuscode, str(http.FOUND))
1677 self.failUnlessEqual(target, redir_url)
1678 d.addCallback(lambda res:
1679 self.shouldRedirect2("test_POST_DIRURL_check",
1683 when_done=redir_url))
1684 d.addCallback(lambda res:
1685 self.POST(foo_url, t="check", return_to=redir_url))
1687 self.failUnless("Healthy :" in res, res)
1688 self.failUnless("Return to file/directory" in res)
1689 self.failUnless(redir_url in res)
1690 d.addCallback(_check3)
1692 d.addCallback(lambda res:
1693 self.POST(foo_url, t="check", output="JSON"))
1694 def _check_json(res):
1695 data = simplejson.loads(res)
1696 self.failUnless("storage-index" in data)
1697 self.failUnless(data["results"]["healthy"])
1698 d.addCallback(_check_json)
# t=check&repair=true on a directory: Healthy page, when_done redirect, and
# return_to link, via the check-and-repair path.
1702 def test_POST_DIRURL_check_and_repair(self):
1703 foo_url = self.public_url + "/foo/"
1704 d = self.POST(foo_url, t="check", repair="true")
1706 self.failUnless("Healthy :" in res, res)
1707 d.addCallback(_check)
1708 redir_url = "http://allmydata.org/TARGET"
1709 def _check2(statuscode, target):
1710 self.failUnlessEqual(statuscode, str(http.FOUND))
1711 self.failUnlessEqual(target, redir_url)
1712 d.addCallback(lambda res:
1713 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1716 t="check", repair="true",
1717 when_done=redir_url))
1718 d.addCallback(lambda res:
1719 self.POST(foo_url, t="check", return_to=redir_url))
1721 self.failUnless("Healthy :" in res)
1722 self.failUnless("Return to file/directory" in res)
1723 self.failUnless(redir_url in res)
1724 d.addCallback(_check3)
# Poll /operations/<ophandle>?t=status&output=JSON once per second until the
# JSON reports finished=true; recurses via stall() while not finished.
1727 def wait_for_operation(self, ignored, ophandle):
1728 url = "/operations/" + ophandle
1729 url += "?t=status&output=JSON"
1732 data = simplejson.loads(res)
1733 if not data["finished"]:
1734 d = self.stall(delay=1.0)
1735 d.addCallback(self.wait_for_operation, ophandle)
# Fetch /operations/<ophandle> results; with output="json" parse and return
# the decoded object, otherwise return the raw body (html/text).
1741 def get_operation_results(self, ignored, ophandle, output=None):
1742 url = "/operations/" + ophandle
1745 url += "&output=" + output
1748 if output and output.lower() == "json":
1749 return simplejson.loads(res)
# t=start-deep-check without ophandle= must be rejected like the other
# slow operations.
1754 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1755 d = self.shouldFail2(error.Error,
1756 "test_POST_DIRURL_deepcheck_no_ophandle",
1758 "slow operation requires ophandle=",
1759 self.POST, self.public_url, t="start-deep-check")
# Start a deep-check (ophandle=123): the POST redirects to /operations/123;
# JSON results report 8 objects checked/healthy; HTML results (with and
# without trailing slash) show the same counts; an unknown per-object page
# 404s; and per-object JSON for foo's storage index reports healthy.
1762 def test_POST_DIRURL_deepcheck(self):
1763 def _check_redirect(statuscode, target):
1764 self.failUnlessEqual(statuscode, str(http.FOUND))
1765 self.failUnless(target.endswith("/operations/123"))
1766 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1767 self.POST, self.public_url,
1768 t="start-deep-check", ophandle="123")
1769 d.addCallback(self.wait_for_operation, "123")
1770 def _check_json(data):
1771 self.failUnlessEqual(data["finished"], True)
1772 self.failUnlessEqual(data["count-objects-checked"], 8)
1773 self.failUnlessEqual(data["count-objects-healthy"], 8)
1774 d.addCallback(_check_json)
1775 d.addCallback(self.get_operation_results, "123", "html")
1776 def _check_html(res):
1777 self.failUnless("Objects Checked: <span>8</span>" in res)
1778 self.failUnless("Objects Healthy: <span>8</span>" in res)
1779 d.addCallback(_check_html)
1781 d.addCallback(lambda res:
1782 self.GET("/operations/123/"))
1783 d.addCallback(_check_html) # should be the same as without the slash
1785 d.addCallback(lambda res:
1786 self.shouldFail2(error.Error, "one", "404 Not Found",
1787 "No detailed results for SI bogus",
1788 self.GET, "/operations/123/bogus"))
1790 foo_si = self._foo_node.get_storage_index()
1791 foo_si_s = base32.b2a(foo_si)
1792 d.addCallback(lambda res:
1793 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1794 def _check_foo_json(res):
1795 data = simplejson.loads(res)
1796 self.failUnlessEqual(data["storage-index"], foo_si_s)
1797 self.failUnless(data["results"]["healthy"])
1798 d.addCallback(_check_foo_json)
# Deep-check with repair=true (ophandle=124) on an all-healthy tree: every
# pre/post-repair counter should show 8 healthy / 0 unhealthy and zero
# repairs attempted, in both the JSON and HTML renderings.
1801 def test_POST_DIRURL_deepcheck_and_repair(self):
1802 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1803 ophandle="124", output="json", followRedirect=True)
1804 d.addCallback(self.wait_for_operation, "124")
1805 def _check_json(data):
1806 self.failUnlessEqual(data["finished"], True)
1807 self.failUnlessEqual(data["count-objects-checked"], 8)
1808 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1809 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1810 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1811 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1812 self.failUnlessEqual(data["count-repairs-successful"], 0)
1813 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1814 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1815 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1816 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1817 d.addCallback(_check_json)
1818 d.addCallback(self.get_operation_results, "124", "html")
1819 def _check_html(res):
1820 self.failUnless("Objects Checked: <span>8</span>" in res)
1822 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1823 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1824 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1826 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1827 self.failUnless("Repairs Successful: <span>0</span>" in res)
1828 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1830 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1831 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1832 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1833 d.addCallback(_check_html)
# POST to a file URL with an unknown t= value must return 400 Bad Request.
1836 def test_POST_FILEURL_bad_t(self):
1837 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1838 "POST to file: bad t=bogus",
1839 self.POST, self.public_url + "/foo/bar.txt",
# POST t=mkdir with name= creates an empty child directory.
1843 def test_POST_mkdir(self): # return value?
1844 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1845 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1846 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST newdir?t=mkdir (t in the query string) also creates an empty directory.
1849 def test_POST_mkdir_2(self):
1850 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
1851 d.addCallback(lambda res:
1852 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1853 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1854 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST t=mkdir through a missing intermediate ('bardir') creates both levels.
1857 def test_POST_mkdirs_2(self):
1858 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
1859 d.addCallback(lambda res:
1860 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
1861 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
1862 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
1863 d.addCallback(self.failUnlessNodeKeysAre, [])
# Unlinked mkdir (POST /uri?t=mkdir) returns a parseable directory URI.
1866 def test_POST_mkdir_no_parentdir_noredirect(self):
1867 d = self.POST("/uri?t=mkdir")
1868 def _after_mkdir(res):
1869 uri.NewDirectoryURI.init_from_string(res)
1870 d.addCallback(_after_mkdir)
# With redirect_to_result=true, an unlinked mkdir answers with a 303 redirect
# whose (unquoted) target is the new directory's uri/URI:DIR2: page.
1873 def test_POST_mkdir_no_parentdir_redirect(self):
1874 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1875 d.addBoth(self.shouldRedirect, None, statuscode='303')
1876 def _check_target(target):
1877 target = urllib.unquote(target)
1878 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1879 d.addCallback(_check_target)
# POST /uri?t=bogus must produce 400 with a message listing the accepted forms.
1882 def test_POST_noparent_bad(self):
1883 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
1884 "/uri accepts only PUT, PUT?t=mkdir, "
1885 "POST?t=upload, and POST?t=mkdir",
1886 self.POST, "/uri?t=bogus")
# Scrape the "Create a directory" form off the welcome page with a regex,
# re-submit it exactly as the browser would, and require a 303 redirect —
# verifies the rendered form and the handler stay in sync.
1889 def test_welcome_page_mkdir_button(self):
1890 # Fetch the welcome page.
1892 def _after_get_welcome_page(res):
1893 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
1894 mo = MKDIR_BUTTON_RE.search(res)
1895 formaction = mo.group(1)
1897 formaname = mo.group(3)
1898 formavalue = mo.group(4)
1899 return (formaction, formt, formaname, formavalue)
1900 d.addCallback(_after_get_welcome_page)
1901 def _after_parse_form(res):
1902 (formaction, formt, formaname, formavalue) = res
1903 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
1904 d.addCallback(_after_parse_form)
1905 d.addBoth(self.shouldRedirect, None, statuscode='303')
# --- mkdir replace/no-replace semantics and when_done redirects.
# "sub" already exists under /foo; replace defaults to true, so plain mkdir
# succeeds and leaves an empty directory.
# NOTE(review): gapped excerpt — per-test `return d` lines and original line
# 1927 (presumably `replace="false")` closing the POST call at 1926) are missing.
1908 def test_POST_mkdir_replace(self): # return value?
1909 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1910 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1911 d.addCallback(self.failUnlessNodeKeysAre, [])
# replace=false as a query argument: expect a refusal and the old "sub"
# (still containing baz.txt) to survive.
1914 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1915 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1916 d.addBoth(self.shouldFail, error.Error,
1917 "POST_mkdir_no_replace_queryarg",
1919 "There was already a child by that name, and you asked me "
1920 "to not replace it")
1921 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1922 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# same refusal, but replace=false supplied as a form field
1925 def test_POST_mkdir_no_replace_field(self): # return value?
1926 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1928 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1930 "There was already a child by that name, and you asked me "
1931 "to not replace it")
1932 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1933 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# when_done as a form field: mkdir succeeds and we are redirected to /THERE
1936 def test_POST_mkdir_whendone_field(self):
1937 d = self.POST(self.public_url + "/foo",
1938 t="mkdir", name="newdir", when_done="/THERE")
1939 d.addBoth(self.shouldRedirect, "/THERE")
1940 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1941 d.addCallback(self.failUnlessNodeKeysAre, [])
# when_done as a query argument behaves the same way
1944 def test_POST_mkdir_whendone_queryarg(self):
1945 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1946 t="mkdir", name="newdir")
1947 d.addBoth(self.shouldRedirect, "/THERE")
1948 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1949 d.addCallback(self.failUnlessNodeKeysAre, [])
# unknown t= on a directory POST must fail with 400
1952 def test_POST_bad_t(self):
1953 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1954 "POST to a directory with bad t=BOGUS",
1955 self.POST, self.public_url + "/foo", t="BOGUS")
# POST ?t=set_children with a JSON request body: atomically add three
# filenode children to /foo and verify each child's URI afterwards.
# NOTE(review): gapped excerpt — the opening of the JSON literal (original
# lines 1962-1963, presumably `reqbody = """{`), the metadata/"ro_uri" lines,
# the `def _then(res):` header (1990), and the final `return d` are all
# missing here; only fragments of the body survive.
1958 def test_POST_set_children(self):
1959 contents9, n9, newuri9 = self.makefile(9)
1960 contents10, n10, newuri10 = self.makefile(10)
1961 contents11, n11, newuri11 = self.makefile(11)
1964 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1967 "ctime": 1002777696.7564139,
1968 "mtime": 1002777696.7564139
1971 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1974 "ctime": 1002777696.7564139,
1975 "mtime": 1002777696.7564139
1978 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1981 "ctime": 1002777696.7564139,
1982 "mtime": 1002777696.7564139
1985 }""" % (newuri9, newuri10, newuri11)
1987 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
1989 d = client.getPage(url, method="POST", postdata=reqbody)
1991 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
1992 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
1993 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
1995 d.addCallback(_then)
1996 d.addErrback(self.dump_error)
# --- POST t=uri tests: attach an existing file cap under a new child name.
# NOTE(review): gapped excerpt — the contents argument closing each
# failUnlessChildContentsAre call (e.g. original 2005-2006) and the
# per-test `return d` lines are missing.
1999 def test_POST_put_uri(self):
2000 contents, n, newuri = self.makefile(8)
2001 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2002 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2003 d.addCallback(lambda res:
2004 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# attaching over an existing name replaces it by default
2008 def test_POST_put_uri_replace(self):
2009 contents, n, newuri = self.makefile(8)
2010 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2011 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2012 d.addCallback(lambda res:
2013 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# replace=false (query arg) must refuse and leave the old bar.txt intact
2017 def test_POST_put_uri_no_replace_queryarg(self):
2018 contents, n, newuri = self.makefile(8)
2019 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2020 name="bar.txt", uri=newuri)
2021 d.addBoth(self.shouldFail, error.Error,
2022 "POST_put_uri_no_replace_queryarg",
2024 "There was already a child by that name, and you asked me "
2025 "to not replace it")
2026 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2027 d.addCallback(self.failUnlessIsBarDotTxt)
# replace=false (form field) behaves identically
2030 def test_POST_put_uri_no_replace_field(self):
2031 contents, n, newuri = self.makefile(8)
2032 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2033 name="bar.txt", uri=newuri)
2034 d.addBoth(self.shouldFail, error.Error,
2035 "POST_put_uri_no_replace_field",
2037 "There was already a child by that name, and you asked me "
2038 "to not replace it")
2039 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2040 d.addCallback(self.failUnlessIsBarDotTxt)
# --- POST t=delete / t=rename tests on directory children.
# NOTE(review): gapped excerpt — per-test `return d` lines are missing
# throughout this span.
2043 def test_POST_delete(self):
2044 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2045 d.addCallback(lambda res: self._foo_node.list())
2046 def _check(children):
2047 self.failIf(u"bar.txt" in children)
2048 d.addCallback(_check)
# rename to a fresh name: old name gone, new name serves the same bytes/JSON
2051 def test_POST_rename_file(self):
2052 d = self.POST(self.public_url + "/foo", t="rename",
2053 from_name="bar.txt", to_name='wibble.txt')
2054 d.addCallback(lambda res:
2055 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2056 d.addCallback(lambda res:
2057 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2058 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2059 d.addCallback(self.failUnlessIsBarDotTxt)
2060 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2061 d.addCallback(self.failUnlessIsBarJSON)
# rename to the same name is a harmless no-op
2064 def test_POST_rename_file_redundant(self):
2065 d = self.POST(self.public_url + "/foo", t="rename",
2066 from_name="bar.txt", to_name='bar.txt')
2067 d.addCallback(lambda res:
2068 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2069 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2070 d.addCallback(self.failUnlessIsBarDotTxt)
2071 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2072 d.addCallback(self.failUnlessIsBarJSON)
2075 def test_POST_rename_file_replace(self):
2076 # rename a file and replace a directory with it
2077 d = self.POST(self.public_url + "/foo", t="rename",
2078 from_name="bar.txt", to_name='empty')
2079 d.addCallback(lambda res:
2080 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2081 d.addCallback(lambda res:
2082 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2083 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2084 d.addCallback(self.failUnlessIsBarDotTxt)
2085 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2086 d.addCallback(self.failUnlessIsBarJSON)
# replace=false (query arg): rename onto an existing child must refuse,
# leaving the target directory untouched
2089 def test_POST_rename_file_no_replace_queryarg(self):
2090 # rename a file and replace a directory with it
2091 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2092 from_name="bar.txt", to_name='empty')
2093 d.addBoth(self.shouldFail, error.Error,
2094 "POST_rename_file_no_replace_queryarg",
2096 "There was already a child by that name, and you asked me "
2097 "to not replace it")
2098 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2099 d.addCallback(self.failUnlessIsEmptyJSON)
# replace=false (form field) behaves identically
2102 def test_POST_rename_file_no_replace_field(self):
2103 # rename a file and replace a directory with it
2104 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2105 from_name="bar.txt", to_name='empty')
2106 d.addBoth(self.shouldFail, error.Error,
2107 "POST_rename_file_no_replace_field",
2109 "There was already a child by that name, and you asked me "
2110 "to not replace it")
2111 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2112 d.addCallback(self.failUnlessIsEmptyJSON)
# helper: assert a t=json response describes a dirnode with zero children
2115 def failUnlessIsEmptyJSON(self, res):
2116 data = simplejson.loads(res)
2117 self.failUnlessEqual(data[0], "dirnode", data)
2118 self.failUnlessEqual(len(data[1]["children"]), 0)
# to_name containing a slash is rejected and the source child is untouched
2120 def test_POST_rename_file_slash_fail(self):
2121 d = self.POST(self.public_url + "/foo", t="rename",
2122 from_name="bar.txt", to_name='kirk/spock.txt')
2123 d.addBoth(self.shouldFail, error.Error,
2124 "test_POST_rename_file_slash_fail",
2126 "to_name= may not contain a slash",
2128 d.addCallback(lambda res:
2129 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# renaming a directory child works the same as renaming a file
2132 def test_POST_rename_dir(self):
2133 d = self.POST(self.public_url, t="rename",
2134 from_name="foo", to_name='plunk')
2135 d.addCallback(lambda res:
2136 self.failIfNodeHasChild(self.public_root, u"foo"))
2137 d.addCallback(lambda res:
2138 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2139 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2140 d.addCallback(self.failUnlessIsFooJSON)
# Assertion helper used by the redirect tests above: expects `res` to be a
# Failure wrapping error.PageRedirect, optionally checks the status code and
# the redirect target, and returns the Location value for further checks.
2143 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2144 """ If target is not None then the redirection has to go to target. If
2145 statuscode is not None then the redirection has to be accomplished with
2146 that HTTP status code."""
# a successful (non-Failure) response means we were NOT redirected: fail loudly
2147 if not isinstance(res, failure.Failure):
2148 to_where = (target is None) and "somewhere" or ("to " + target)
2149 self.fail("%s: we were expecting to get redirected %s, not get an"
2150 " actual page: %s" % (which, to_where, res))
# re-raises anything that is not a PageRedirect
2151 res.trap(error.PageRedirect)
2152 if statuscode is not None:
2153 self.failUnlessEqual(res.value.status, statuscode,
2154 "%s: not a redirect" % which)
2155 if target is not None:
2156 # the PageRedirect does not seem to capture the uri= query arg
2157 # properly, so we can't check for it.
2158 realtarget = self.webish_url + target
2159 self.failUnlessEqual(res.value.location, realtarget,
2160 "%s: wrong target" % which)
2161 return res.value.location
# --- GET /uri?uri=... form tests: the handler should redirect to /uri/$CAP,
# preserving extra query arguments.
# NOTE(review): gapped excerpt — original line 2167 (presumably
# `d = self.GET(base)`), the trailing GET call of test_GET_URI_form_bad
# (2187-2188), `d = self.GET(...)` lines at 2206/2216, and the per-test
# `return d` lines are missing from this span.
2163 def test_GET_URI_form(self):
2164 base = "/uri?uri=%s" % self._bar_txt_uri
2165 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2166 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2168 d.addBoth(self.shouldRedirect, targetbase)
2169 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2170 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2171 d.addCallback(lambda res: self.GET(base+"&t=json"))
2172 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2173 d.addCallback(self.log, "about to get file by uri")
2174 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2175 d.addCallback(self.failUnlessIsBarDotTxt)
2176 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2177 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2178 followRedirect=True))
2179 d.addCallback(self.failUnlessIsFooJSON)
2180 d.addCallback(self.log, "got dir by uri")
# GET /uri with no uri= argument is a 400
2184 def test_GET_URI_form_bad(self):
2185 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2186 "400 Bad Request", "GET /uri requires uri=",
# the rename form page embeds the source child name and a when_done field
2190 def test_GET_rename_form(self):
2191 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2192 followRedirect=True)
2194 self.failUnless('name="when_done" value="."' in res, res)
2195 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2196 d.addCallback(_check)
# pass-through logging hook for callback chains (body gapped in this excerpt)
2199 def log(self, res, msg):
2200 #print "MSG: %s RES: %s" % (msg, res)
# --- direct /uri/$CAP fetches
2204 def test_GET_URI_URL(self):
2205 base = "/uri/%s" % self._bar_txt_uri
2207 d.addCallback(self.failUnlessIsBarDotTxt)
2208 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2209 d.addCallback(self.failUnlessIsBarDotTxt)
2210 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2211 d.addCallback(self.failUnlessIsBarDotTxt)
2214 def test_GET_URI_URL_dir(self):
2215 base = "/uri/%s?t=json" % self._foo_uri
2217 d.addCallback(self.failUnlessIsFooJSON)
# fetching a cap with no recoverable shares should yield HTTP GONE
2220 def test_GET_URI_URL_missing(self):
2221 base = "/uri/%s" % self._bad_file_uri
2222 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2223 http.GONE, None, "NotEnoughSharesError",
2225 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2226 # here? we must arrange for a download to fail after target.open()
2227 # has been called, and then inspect the response to see that it is
2228 # shorter than we expected.
# --- PUT ?t=uri on directory and file URLs: attach an existing cap in place.
# NOTE(review): gapped excerpt — the `def _made_dir(dn):` headers (original
# 2233/2249), the closing arguments of the failUnlessChildURIIs/
# failUnlessChildContentsAre calls, and the `return d` lines are missing.
2231 def test_PUT_DIRURL_uri(self):
2232 d = self.s.create_empty_dirnode()
2234 new_uri = dn.get_uri()
2235 # replace /foo with a new (empty) directory
2236 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2237 d.addCallback(lambda res:
2238 self.failUnlessEqual(res.strip(), new_uri))
2239 d.addCallback(lambda res:
2240 self.failUnlessChildURIIs(self.public_root,
2244 d.addCallback(_made_dir)
# replace=false must refuse to clobber the existing /foo
2247 def test_PUT_DIRURL_uri_noreplace(self):
2248 d = self.s.create_empty_dirnode()
2250 new_uri = dn.get_uri()
2251 # replace /foo with a new (empty) directory, but ask that
2252 # replace=false, so it should fail
2253 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2254 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2256 self.public_url + "/foo?t=uri&replace=false",
2258 d.addCallback(lambda res:
2259 self.failUnlessChildURIIs(self.public_root,
2263 d.addCallback(_made_dir)
# unknown t= on a directory PUT is a 400, and /foo is left alone
2266 def test_PUT_DIRURL_bad_t(self):
2267 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2268 "400 Bad Request", "PUT to a directory",
2269 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2270 d.addCallback(lambda res:
2271 self.failUnlessChildURIIs(self.public_root,
# PUT ?t=uri on a new child name attaches the given file cap
2276 def test_PUT_NEWFILEURL_uri(self):
2277 contents, n, new_uri = self.makefile(8)
2278 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2279 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2280 d.addCallback(lambda res:
2281 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# attaching over an existing child replaces it by default
2285 def test_PUT_NEWFILEURL_uri_replace(self):
2286 contents, n, new_uri = self.makefile(8)
2287 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2288 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2289 d.addCallback(lambda res:
2290 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# ...and replace=false refuses
2294 def test_PUT_NEWFILEURL_uri_no_replace(self):
2295 contents, n, new_uri = self.makefile(8)
2296 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2297 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2299 "There was already a child by that name, and you asked me "
2300 "to not replace it")
# --- PUT /uri (unlinked upload) tests: immutable default, mutable=false,
# mutable=true, plus PUT /uri?t=mkdir.
# NOTE(review): gapped excerpt — the `def _check(uri):` / `def _check2(res):`
# headers (original 2306/2313/2321/2328/2363), the expected-contents argument
# lines, and the `return d` lines are missing throughout this span.
2303 def test_PUT_NEWFILE_URI(self):
2304 file_contents = "New file contents here\n"
2305 d = self.PUT("/uri", file_contents)
2307 assert isinstance(uri, str), uri
2308 self.failUnless(uri in FakeCHKFileNode.all_contents)
2309 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2311 return self.GET("/uri/%s" % uri)
2312 d.addCallback(_check)
2314 self.failUnlessEqual(res, file_contents)
2315 d.addCallback(_check2)
# explicit mutable=false should behave exactly like the default immutable PUT
2318 def test_PUT_NEWFILE_URI_not_mutable(self):
2319 file_contents = "New file contents here\n"
2320 d = self.PUT("/uri?mutable=false", file_contents)
2322 assert isinstance(uri, str), uri
2323 self.failUnless(uri in FakeCHKFileNode.all_contents)
2324 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2326 return self.GET("/uri/%s" % uri)
2327 d.addCallback(_check)
2329 self.failUnlessEqual(res, file_contents)
2330 d.addCallback(_check2)
# PUT /uri only accepts the documented t= values
2333 def test_PUT_NEWFILE_URI_only_PUT(self):
2334 d = self.PUT("/uri?t=bogus", "")
2335 d.addBoth(self.shouldFail, error.Error,
2336 "PUT_NEWFILE_URI_only_PUT",
2338 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# mutable=true should create a mutable file whose best version round-trips
2341 def test_PUT_NEWFILE_URI_mutable(self):
2342 file_contents = "New file contents here\n"
2343 d = self.PUT("/uri?mutable=true", file_contents)
2344 def _check_mutable(uri):
2347 self.failUnless(IMutableFileURI.providedBy(u))
2348 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2349 n = self.s.create_node_from_uri(uri)
2350 return n.download_best_version()
2351 d.addCallback(_check_mutable)
2352 def _check2_mutable(data):
2353 self.failUnlessEqual(data, file_contents)
2354 d.addCallback(_check2_mutable)
# NOTE(review): the following fragment (original 2358-2365) appears to belong
# to a helper whose `def` header is missing from this excerpt.
2358 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2359 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2361 return self.GET("/uri/%s" % uri)
2362 d.addCallback(_check)
2364 self.failUnlessEqual(res, file_contents)
2365 d.addCallback(_check2)
# PUT /uri?t=mkdir: response is a dircap; the new directory is empty
2368 def test_PUT_mkdir(self):
2369 d = self.PUT("/uri?t=mkdir", "")
2371 n = self.s.create_node_from_uri(uri.strip())
2372 d2 = self.failUnlessNodeKeysAre(n, [])
2373 d2.addCallback(lambda res:
2374 self.GET("/uri/%s?t=json" % uri))
2376 d.addCallback(_check)
2377 d.addCallback(self.failUnlessIsEmptyJSON)
# --- checker POST plus assorted bad-request tests.
# NOTE(review): gapped excerpt — `def _done(res):` (original 2382) and the
# per-test `return d` lines are missing.
2380 def test_POST_check(self):
2381 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2383 # this returns a string form of the results, which are probably
2384 # None since we're using fake filenodes.
2385 # TODO: verify that the check actually happened, by changing
2386 # FakeCHKFileNode to count how many times .check() has been
2389 d.addCallback(_done)
# an unknown HTTP verb on a file URL yields 501 Not Implemented
2392 def test_bad_method(self):
2393 url = self.webish_url + self.public_url + "/foo/bar.txt"
2394 d = self.shouldHTTPError("test_bad_method",
2395 501, "Not Implemented",
2396 "I don't know how to treat a BOGUS request.",
2397 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri URL is likewise unimplemented
2400 def test_short_url(self):
2401 url = self.webish_url + "/uri"
2402 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2403 "I don't know how to treat a DELETE request.",
2404 client.getPage, url, method="DELETE")
# status query for an ophandle that was never created is a 404
2407 def test_ophandle_bad(self):
2408 url = self.webish_url + "/operations/bogus?t=status"
2409 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2410 "unknown/expired handle 'bogus'",
2411 client.getPage, url)
# --- operation-handle lifecycle tests: cancel, retain-for expiry, and
# release-after-complete. Each starts a t=start-manifest operation and then
# queries /operations/<handle>.
# NOTE(review): gapped excerpt — `def _check1(res):` / `def _check2(res):`
# headers, the `self.GET` lines preceding the final shouldHTTPError calls
# (original 2436/2455/2470), and the `return d` lines are missing.
2414 def test_ophandle_cancel(self):
2415 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2416 followRedirect=True)
2417 d.addCallback(lambda ignored:
2418 self.GET("/operations/128?t=status&output=JSON"))
2420 data = simplejson.loads(res)
2421 self.failUnless("finished" in data, res)
# reach into the webish server's handle table to grab the Monitor
2422 monitor = self.ws.root.child_operations.handles["128"][0]
2423 d = self.POST("/operations/128?t=cancel&output=JSON")
2425 data = simplejson.loads(res)
2426 self.failUnless("finished" in data, res)
2427 # t=cancel causes the handle to be forgotten
2428 self.failUnless(monitor.is_cancelled())
2429 d.addCallback(_check2)
2431 d.addCallback(_check1)
# after cancellation the handle must be gone
2432 d.addCallback(lambda ignored:
2433 self.shouldHTTPError("test_ophandle_cancel",
2434 404, "404 Not Found",
2435 "unknown/expired handle '128'",
2437 "/operations/128?t=status&output=JSON"))
# retain-for=0 on the status query shortens the handle's lifetime
2440 def test_ophandle_retainfor(self):
2441 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2442 followRedirect=True)
2443 d.addCallback(lambda ignored:
2444 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2446 data = simplejson.loads(res)
2447 self.failUnless("finished" in data, res)
2448 d.addCallback(_check1)
2449 # the retain-for=0 will cause the handle to be expired very soon
2450 d.addCallback(self.stall, 2.0)
2451 d.addCallback(lambda ignored:
2452 self.shouldHTTPError("test_ophandle_retainfor",
2453 404, "404 Not Found",
2454 "unknown/expired handle '129'",
2456 "/operations/129?t=status&output=JSON"))
# release-after-complete=true expires the handle as soon as it is fetched
2459 def test_ophandle_release_after_complete(self):
2460 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2461 followRedirect=True)
2462 d.addCallback(self.wait_for_operation, "130")
2463 d.addCallback(lambda ignored:
2464 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2465 # the release-after-complete=true will cause the handle to be expired
2466 d.addCallback(lambda ignored:
2467 self.shouldHTTPError("test_ophandle_release_after_complete",
2468 404, "404 Not Found",
2469 "unknown/expired handle '130'",
2471 "/operations/130?t=status&output=JSON"))
# POST /report_incident should acknowledge the report in its response body.
# NOTE(review): gapped excerpt — `def _done(res):` (original 2476), the file
# write/close in test_static (2485-2486), and the `return d` lines are missing.
2474 def test_incident(self):
2475 d = self.POST("/report_incident", details="eek")
2477 self.failUnless("Thank you for your report!" in res, res)
2478 d.addCallback(_done)
# files placed under the configured static directory are served at /static/
2481 def test_static(self):
2482 webdir = os.path.join(self.staticdir, "subdir")
2483 fileutil.make_dirs(webdir)
2484 f = open(os.path.join(webdir, "hello.txt"), "wb")
2488 d = self.GET("/static/subdir/hello.txt")
2490 self.failUnlessEqual(res, "hello")
2491 d.addCallback(_check)
# Unit tests for the small formatting helpers in allmydata.web.common and
# allmydata.web.status (abbreviate_* and plural).
# NOTE(review): gapped excerpt — the `def convert(s):` header (original 2517)
# and `def convert2(s):` header (2522) are missing from this span.
2495 class Util(unittest.TestCase):
2496 def test_abbreviate_time(self):
2497 self.failUnlessEqual(common.abbreviate_time(None), "")
2498 self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
2499 self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
2500 self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
2501 self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
2503 def test_abbreviate_rate(self):
2504 self.failUnlessEqual(common.abbreviate_rate(None), "")
2505 self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
2506 self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
2507 self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
2509 def test_abbreviate_size(self):
2510 self.failUnlessEqual(common.abbreviate_size(None), "")
2511 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
2512 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
2513 self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
2514 self.failUnlessEqual(common.abbreviate_size(123), "123B")
2516 def test_plural(self):
2518 return "%d second%s" % (s, status.plural(s))
2519 self.failUnlessEqual(convert(0), "0 seconds")
2520 self.failUnlessEqual(convert(1), "1 second")
2521 self.failUnlessEqual(convert(2), "2 seconds")
2523 return "has share%s: %s" % (status.plural(s), ",".join(s))
2524 self.failUnlessEqual(convert2([]), "has shares: ")
2525 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2526 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2529 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
# Helper for the Grid tests below: issue a POST-method request against the
# stashed file URL for `which`, appending `args` as the query string.
# `ign` is the ignored result of the previous Deferred callback.
2531 def CHECK(self, ign, which, args, clientnum=0):
2532 fileurl = self.fileurls[which]
2533 url = fileurl + "?" + args
2534 return self.GET(url, method="POST", clientnum=clientnum)
# End-to-end t=check tests on a real (no_network) grid: upload healthy,
# sick (one share deleted), dead (9 of 10 shares deleted), corrupt (mutable,
# one share corrupted), and literal files, then check each via the web API
# in both HTML and JSON output.
# NOTE(review): gapped excerpt — self.set_up_grid()/self.uris/self.fileurls
# initialization lines, the `corrupt_share(cso)`-style call after
# cso.parseOptions (original 2579), and the final `return d` are missing.
2536 def test_filecheck(self):
2537 self.basedir = "web/Grid/filecheck"
2539 c0 = self.g.clients[0]
2542 d = c0.upload(upload.Data(DATA, convergence=""))
2543 def _stash_uri(ur, which):
2544 self.uris[which] = ur.uri
2545 d.addCallback(_stash_uri, "good")
2546 d.addCallback(lambda ign:
2547 c0.upload(upload.Data(DATA+"1", convergence="")))
2548 d.addCallback(_stash_uri, "sick")
2549 d.addCallback(lambda ign:
2550 c0.upload(upload.Data(DATA+"2", convergence="")))
2551 d.addCallback(_stash_uri, "dead")
2552 def _stash_mutable_uri(n, which):
2553 self.uris[which] = n.get_uri()
2554 assert isinstance(self.uris[which], str)
2555 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2556 d.addCallback(_stash_mutable_uri, "corrupt")
2557 d.addCallback(lambda ign:
2558 c0.upload(upload.Data("literal", convergence="")))
2559 d.addCallback(_stash_uri, "small")
2561 def _compute_fileurls(ignored):
2563 for which in self.uris:
2564 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2565 d.addCallback(_compute_fileurls)
# damage the grid: delete one "sick" share, 9 "dead" shares, corrupt one
# "corrupt" share via the CLI debug tool
2567 def _clobber_shares(ignored):
2568 good_shares = self.find_shares(self.uris["good"])
2569 self.failUnlessEqual(len(good_shares), 10)
2570 sick_shares = self.find_shares(self.uris["sick"])
2571 os.unlink(sick_shares[0][2])
2572 dead_shares = self.find_shares(self.uris["dead"])
2573 for i in range(1, 10):
2574 os.unlink(dead_shares[i][2])
2575 c_shares = self.find_shares(self.uris["corrupt"])
2576 cso = CorruptShareOptions()
2577 cso.stdout = StringIO()
2578 cso.parseOptions([c_shares[0][2]])
2580 d.addCallback(_clobber_shares)
# healthy file: HTML and JSON checker results
2582 d.addCallback(self.CHECK, "good", "t=check")
2583 def _got_html_good(res):
2584 self.failUnless("Healthy" in res, res)
2585 self.failIf("Not Healthy" in res, res)
2586 d.addCallback(_got_html_good)
2587 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2588 def _got_html_good_return_to(res):
2589 self.failUnless("Healthy" in res, res)
2590 self.failIf("Not Healthy" in res, res)
2591 self.failUnless('<a href="somewhere">Return to file'
2593 d.addCallback(_got_html_good_return_to)
2594 d.addCallback(self.CHECK, "good", "t=check&output=json")
2595 def _got_json_good(res):
2596 r = simplejson.loads(res)
2597 self.failUnlessEqual(r["summary"], "Healthy")
2598 self.failUnless(r["results"]["healthy"])
2599 self.failIf(r["results"]["needs-rebalancing"])
2600 self.failUnless(r["results"]["recoverable"])
2601 d.addCallback(_got_json_good)
# literal (LIT) files are trivially healthy and have no storage index
2603 d.addCallback(self.CHECK, "small", "t=check")
2604 def _got_html_small(res):
2605 self.failUnless("Literal files are always healthy" in res, res)
2606 self.failIf("Not Healthy" in res, res)
2607 d.addCallback(_got_html_small)
2608 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2609 def _got_html_small_return_to(res):
2610 self.failUnless("Literal files are always healthy" in res, res)
2611 self.failIf("Not Healthy" in res, res)
2612 self.failUnless('<a href="somewhere">Return to file'
2614 d.addCallback(_got_html_small_return_to)
2615 d.addCallback(self.CHECK, "small", "t=check&output=json")
2616 def _got_json_small(res):
2617 r = simplejson.loads(res)
2618 self.failUnlessEqual(r["storage-index"], "")
2619 self.failUnless(r["results"]["healthy"])
2620 d.addCallback(_got_json_small)
# sick file: unhealthy but still recoverable (9 of 10 shares, k=3)
2622 d.addCallback(self.CHECK, "sick", "t=check")
2623 def _got_html_sick(res):
2624 self.failUnless("Not Healthy" in res, res)
2625 d.addCallback(_got_html_sick)
2626 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2627 def _got_json_sick(res):
2628 r = simplejson.loads(res)
2629 self.failUnlessEqual(r["summary"],
2630 "Not Healthy: 9 shares (enc 3-of-10)")
2631 self.failIf(r["results"]["healthy"])
2632 self.failIf(r["results"]["needs-rebalancing"])
2633 self.failUnless(r["results"]["recoverable"])
2634 d.addCallback(_got_json_sick)
# dead file: only 1 share left, below k=3, so unrecoverable
2636 d.addCallback(self.CHECK, "dead", "t=check")
2637 def _got_html_dead(res):
2638 self.failUnless("Not Healthy" in res, res)
2639 d.addCallback(_got_html_dead)
2640 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2641 def _got_json_dead(res):
2642 r = simplejson.loads(res)
2643 self.failUnlessEqual(r["summary"],
2644 "Not Healthy: 1 shares (enc 3-of-10)")
2645 self.failIf(r["results"]["healthy"])
2646 self.failIf(r["results"]["needs-rebalancing"])
2647 self.failIf(r["results"]["recoverable"])
2648 d.addCallback(_got_json_dead)
# corrupt share only shows up under verify=true
2650 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2651 def _got_html_corrupt(res):
2652 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2653 d.addCallback(_got_html_corrupt)
2654 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2655 def _got_json_corrupt(res):
2656 r = simplejson.loads(res)
2657 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2659 self.failIf(r["results"]["healthy"])
2660 self.failUnless(r["results"]["recoverable"])
2661 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2662 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2663 d.addCallback(_got_json_corrupt)
2665 d.addErrback(self.explain_web_error)
# t=check&repair=true over HTML output: healthy files need no repair, sick
# and corrupt files are repaired; repair of an unrecoverable ("dead") file
# is deliberately left commented out pending a decision on how to report it.
# NOTE(review): gapped excerpt — grid setup lines, the corrupt_share(cso)
# invocation after cso.parseOptions (original 2708), and the final `return d`
# are missing.
2668 def test_repair_html(self):
2669 self.basedir = "web/Grid/repair_html"
2671 c0 = self.g.clients[0]
2674 d = c0.upload(upload.Data(DATA, convergence=""))
2675 def _stash_uri(ur, which):
2676 self.uris[which] = ur.uri
2677 d.addCallback(_stash_uri, "good")
2678 d.addCallback(lambda ign:
2679 c0.upload(upload.Data(DATA+"1", convergence="")))
2680 d.addCallback(_stash_uri, "sick")
2681 d.addCallback(lambda ign:
2682 c0.upload(upload.Data(DATA+"2", convergence="")))
2683 d.addCallback(_stash_uri, "dead")
2684 def _stash_mutable_uri(n, which):
2685 self.uris[which] = n.get_uri()
2686 assert isinstance(self.uris[which], str)
2687 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2688 d.addCallback(_stash_mutable_uri, "corrupt")
2690 def _compute_fileurls(ignored):
2692 for which in self.uris:
2693 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2694 d.addCallback(_compute_fileurls)
# same damage pattern as test_filecheck: one sick share gone, 9 dead shares
# gone, one corrupt share mangled via the debug CLI
2696 def _clobber_shares(ignored):
2697 good_shares = self.find_shares(self.uris["good"])
2698 self.failUnlessEqual(len(good_shares), 10)
2699 sick_shares = self.find_shares(self.uris["sick"])
2700 os.unlink(sick_shares[0][2])
2701 dead_shares = self.find_shares(self.uris["dead"])
2702 for i in range(1, 10):
2703 os.unlink(dead_shares[i][2])
2704 c_shares = self.find_shares(self.uris["corrupt"])
2705 cso = CorruptShareOptions()
2706 cso.stdout = StringIO()
2707 cso.parseOptions([c_shares[0][2]])
2709 d.addCallback(_clobber_shares)
2711 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2712 def _got_html_good(res):
2713 self.failUnless("Healthy" in res, res)
2714 self.failIf("Not Healthy" in res, res)
2715 self.failUnless("No repair necessary" in res, res)
2716 d.addCallback(_got_html_good)
2718 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2719 def _got_html_sick(res):
2720 self.failUnless("Healthy : healthy" in res, res)
2721 self.failIf("Not Healthy" in res, res)
2722 self.failUnless("Repair successful" in res, res)
2723 d.addCallback(_got_html_sick)
2725 # repair of a dead file will fail, of course, but it isn't yet
2726 # clear how this should be reported. Right now it shows up as
2729 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2730 #def _got_html_dead(res):
2732 # self.failUnless("Healthy : healthy" in res, res)
2733 # self.failIf("Not Healthy" in res, res)
2734 # self.failUnless("No repair necessary" in res, res)
2735 #d.addCallback(_got_html_dead)
2737 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2738 def _got_html_corrupt(res):
2739 self.failUnless("Healthy : Healthy" in res, res)
2740 self.failIf("Not Healthy" in res, res)
2741 self.failUnless("Repair successful" in res, res)
2742 d.addCallback(_got_html_corrupt)
2744 d.addErrback(self.explain_web_error)
# t=check&repair=true&output=json on a single sick file: verify the JSON
# structure reports the attempted/successful repair and the pre-/post-repair
# checker results.
# NOTE(review): gapped excerpt — grid/self.uris/self.fileurls setup lines and
# the final `return d` are missing.
2747 def test_repair_json(self):
2748 self.basedir = "web/Grid/repair_json"
2750 c0 = self.g.clients[0]
2753 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2754 def _stash_uri(ur, which):
2755 self.uris[which] = ur.uri
2756 d.addCallback(_stash_uri, "sick")
2758 def _compute_fileurls(ignored):
2760 for which in self.uris:
2761 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2762 d.addCallback(_compute_fileurls)
# delete one of the ten shares so the file is unhealthy but repairable
2764 def _clobber_shares(ignored):
2765 sick_shares = self.find_shares(self.uris["sick"])
2766 os.unlink(sick_shares[0][2])
2767 d.addCallback(_clobber_shares)
2769 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2770 def _got_json_sick(res):
2771 r = simplejson.loads(res)
2772 self.failUnlessEqual(r["repair-attempted"], True)
2773 self.failUnlessEqual(r["repair-successful"], True)
2774 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2775 "Not Healthy: 9 shares (enc 3-of-10)")
2776 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2777 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2778 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2779 d.addCallback(_got_json_sick)
2781 d.addErrback(self.explain_web_error)
# Verify that the webapi tolerates UnknownNode children (caps from a
# hypothetical future version of Tahoe): directory HTML listings, the
# t=json directory representation, and the t=info page must all render
# without error, both through the read-write and read-only dircaps.
2784 def test_unknown(self):
2785 self.basedir = "web/Grid/unknown"
2787 c0 = self.g.clients[0]
# made-up cap formats that no current code can parse
2791 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
2792 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
2793 # the future cap format may contain slashes, which must be tolerated
2794 expected_info_url = "uri/%s?t=info" % urllib.quote(future_writecap,
2796 future_node = UnknownNode(future_writecap, future_readcap)
2798 d = c0.create_empty_dirnode()
# stash both the rw and ro webapi URLs for the new directory, then
# link the unknown node into it under the name "future"
2799 def _stash_root_and_create_file(n):
2801 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
2802 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
2803 return self.rootnode.set_node(u"future", future_node)
2804 d.addCallback(_stash_root_and_create_file)
2805 # make sure directory listing tolerates unknown nodes
2806 d.addCallback(lambda ign: self.GET(self.rooturl))
2807 def _check_html(res):
2808 self.failUnlessIn("<td>future</td>", res)
2809 # find the More Info link for "future", should be relative
2810 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
2811 info_url = mo.group(1)
2812 self.failUnlessEqual(info_url, "future?t=info")
2814 d.addCallback(_check_html)
2815 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
# the t=json child entry must be typed "unknown"; rw_uri appears only
# when the directory was accessed through its writecap
2816 def _check_json(res, expect_writecap):
2817 data = simplejson.loads(res)
2818 self.failUnlessEqual(data[0], "dirnode")
2819 f = data[1]["children"]["future"]
2820 self.failUnlessEqual(f[0], "unknown")
2822 self.failUnlessEqual(f[1]["rw_uri"], future_writecap)
2824 self.failIfIn("rw_uri", f[1])
2825 self.failUnlessEqual(f[1]["ro_uri"], future_readcap)
2826 self.failUnless("metadata" in f[1])
2827 d.addCallback(_check_json, expect_writecap=True)
2828 d.addCallback(lambda ign: self.GET(expected_info_url))
# the t=info page must describe the object as "unknown" and must not
# offer any of the operations that require a known node type
2829 def _check_info(res, expect_readcap):
2830 self.failUnlessIn("Object Type: <span>unknown</span>", res)
2831 self.failUnlessIn(future_writecap, res)
2833 self.failUnlessIn(future_readcap, res)
2834 self.failIfIn("Raw data as", res)
2835 self.failIfIn("Directory writecap", res)
2836 self.failIfIn("Checker Operations", res)
2837 self.failIfIn("Mutable File Operations", res)
2838 self.failIfIn("Directory Operations", res)
2839 d.addCallback(_check_info, expect_readcap=False)
2840 d.addCallback(lambda ign: self.GET(self.rooturl+"future?t=info"))
2841 d.addCallback(_check_info, expect_readcap=True)
2843 # and make sure that a read-only version of the directory can be
2844 # rendered too. This version will not have future_writecap
2845 d.addCallback(lambda ign: self.GET(self.rourl))
2846 d.addCallback(_check_html)
2847 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
2848 d.addCallback(_check_json, expect_writecap=False)
# Exercise the streaming deep-check ("t=stream-deep-check") and
# streaming manifest ("t=stream-manifest") webapi operations on a small
# directory tree (root, an immutable file, a literal file, a damaged
# file, and an UnknownNode), then make a subdirectory unrecoverable and
# verify that both operations report the failure with an "ERROR:" line
# followed by a traceback instead of JSON.
2851 def test_deep_check(self):
2852 self.basedir = "web/Grid/deep_check"
2854 c0 = self.g.clients[0]
2858 d = c0.create_empty_dirnode()
2859 def _stash_root_and_create_file(n):
2861 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2862 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2863 d.addCallback(_stash_root_and_create_file)
# remember each child's URI under a symbolic name for later lookup
2864 def _stash_uri(fn, which):
2865 self.uris[which] = fn.get_uri()
2867 d.addCallback(_stash_uri, "good")
2868 d.addCallback(lambda ign:
2869 self.rootnode.add_file(u"small",
2870 upload.Data("literal",
2872 d.addCallback(_stash_uri, "small")
2873 d.addCallback(lambda ign:
2874 self.rootnode.add_file(u"sick",
2875 upload.Data(DATA+"1",
2877 d.addCallback(_stash_uri, "sick")
2879 # this tests that deep-check and stream-manifest will ignore
2880 # UnknownNode instances. Hopefully this will also cover deep-stats.
2881 future_writecap = "x-tahoe-crazy://I_am_from_the_future."
2882 future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
2883 future_node = UnknownNode(future_writecap, future_readcap)
2884 d.addCallback(lambda ign: self.rootnode.set_node(u"future",future_node))
# damage "sick" by deleting two of its shares (still recoverable)
2886 def _clobber_shares(ignored):
2887 self.delete_shares_numbered(self.uris["sick"], [0,1])
2888 d.addCallback(_clobber_shares)
2896 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# each line of the streamed response is an independent JSON unit;
# the prints below aid debugging if a line fails to decode
2899 units = [simplejson.loads(line)
2900 for line in res.splitlines()
2903 print "response is:", res
2904 print "undecodeable line was '%s'" % line
# 5 traversal units (root, good, small, sick, future) plus the
# trailing stats unit
2906 self.failUnlessEqual(len(units), 5+1)
2907 # should be parent-first
2909 self.failUnlessEqual(u0["path"], [])
2910 self.failUnlessEqual(u0["type"], "directory")
2911 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2912 u0cr = u0["check-results"]
2913 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
2915 ugood = [u for u in units
2916 if u["type"] == "file" and u["path"] == [u"good"]][0]
2917 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2918 ugoodcr = ugood["check-results"]
2919 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
2922 self.failUnlessEqual(stats["type"], "stats")
2924 self.failUnlessEqual(s["count-immutable-files"], 2)
2925 self.failUnlessEqual(s["count-literal-files"], 1)
2926 self.failUnlessEqual(s["count-directories"], 1)
# the UnknownNode must be counted, not crashed on
2927 self.failUnlessEqual(s["count-unknown"], 1)
2928 d.addCallback(_done)
2930 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2931 def _check_manifest(res):
2932 self.failUnless(res.endswith("\n"))
2933 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
2934 self.failUnlessEqual(len(units), 5+1)
2935 self.failUnlessEqual(units[-1]["type"], "stats")
2937 self.failUnlessEqual(first["path"], [])
2938 self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
2939 self.failUnlessEqual(first["type"], "directory")
2940 stats = units[-1]["stats"]
2941 self.failUnlessEqual(stats["count-immutable-files"], 2)
2942 self.failUnlessEqual(stats["count-literal-files"], 1)
2943 self.failUnlessEqual(stats["count-mutable-files"], 0)
# NOTE(review): duplicate of the count-immutable-files assertion
# three lines up; possibly meant to check count-directories — confirm
2944 self.failUnlessEqual(stats["count-immutable-files"], 2)
2945 self.failUnlessEqual(stats["count-unknown"], 1)
2946 d.addCallback(_check_manifest)
2948 # now add root/subdir and root/subdir/grandchild, then make subdir
2949 # unrecoverable, then see what happens
2951 d.addCallback(lambda ign:
2952 self.rootnode.create_empty_directory(u"subdir"))
2953 d.addCallback(_stash_uri, "subdir")
2954 d.addCallback(lambda subdir_node:
2955 subdir_node.add_file(u"grandchild",
2956 upload.Data(DATA+"2",
2958 d.addCallback(_stash_uri, "grandchild")
2960 d.addCallback(lambda ign:
2961 self.delete_shares_numbered(self.uris["subdir"],
2969 # root/subdir [unrecoverable]
2970 # root/subdir/grandchild
2972 # how should a streaming-JSON API indicate fatal error?
2973 # answer: emit ERROR: instead of a JSON string
2975 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2976 def _check_broken_manifest(res):
2977 lines = res.splitlines()
2979 for (i,line) in enumerate(lines)
2980 if line.startswith("ERROR:")]
2982 self.fail("no ERROR: in output: %s" % (res,))
2983 first_error = error_lines[0]
# NOTE(review): error_line is assigned but never used below
2984 error_line = lines[first_error]
2985 error_msg = lines[first_error+1:]
2986 error_msg_s = "\n".join(error_msg) + "\n"
2987 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2989 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
# every line before the ERROR: marker must still be valid JSON
2990 units = [simplejson.loads(line) for line in lines[:first_error]]
2991 self.failUnlessEqual(len(units), 6) # includes subdir
2992 last_unit = units[-1]
2993 self.failUnlessEqual(last_unit["path"], ["subdir"])
2994 d.addCallback(_check_broken_manifest)
2996 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# deep-check on the broken tree: same ERROR:-then-traceback shape, but
# the last JSON unit additionally carries unrecoverability details
2997 def _check_broken_deepcheck(res):
2998 lines = res.splitlines()
3000 for (i,line) in enumerate(lines)
3001 if line.startswith("ERROR:")]
3003 self.fail("no ERROR: in output: %s" % (res,))
3004 first_error = error_lines[0]
# NOTE(review): error_line is assigned but never used below
3005 error_line = lines[first_error]
3006 error_msg = lines[first_error+1:]
3007 error_msg_s = "\n".join(error_msg) + "\n"
3008 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3010 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3011 units = [simplejson.loads(line) for line in lines[:first_error]]
3012 self.failUnlessEqual(len(units), 6) # includes subdir
3013 last_unit = units[-1]
3014 self.failUnlessEqual(last_unit["path"], ["subdir"])
3015 r = last_unit["check-results"]["results"]
3016 self.failUnlessEqual(r["count-recoverable-versions"], 0)
3017 self.failUnlessEqual(r["count-shares-good"], 1)
3018 self.failUnlessEqual(r["recoverable"], False)
3019 d.addCallback(_check_broken_deepcheck)
3021 d.addErrback(self.explain_web_error)
3024 def test_deep_check_and_repair(self):
3025 self.basedir = "web/Grid/deep_check_and_repair"
3027 c0 = self.g.clients[0]
3031 d = c0.create_empty_dirnode()
3032 def _stash_root_and_create_file(n):
3034 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3035 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3036 d.addCallback(_stash_root_and_create_file)
3037 def _stash_uri(fn, which):
3038 self.uris[which] = fn.get_uri()
3039 d.addCallback(_stash_uri, "good")
3040 d.addCallback(lambda ign:
3041 self.rootnode.add_file(u"small",
3042 upload.Data("literal",
3044 d.addCallback(_stash_uri, "small")
3045 d.addCallback(lambda ign:
3046 self.rootnode.add_file(u"sick",
3047 upload.Data(DATA+"1",
3049 d.addCallback(_stash_uri, "sick")
3050 #d.addCallback(lambda ign:
3051 # self.rootnode.add_file(u"dead",
3052 # upload.Data(DATA+"2",
3054 #d.addCallback(_stash_uri, "dead")
3056 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3057 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3058 #d.addCallback(_stash_uri, "corrupt")
3060 def _clobber_shares(ignored):
3061 good_shares = self.find_shares(self.uris["good"])
3062 self.failUnlessEqual(len(good_shares), 10)
3063 sick_shares = self.find_shares(self.uris["sick"])
3064 os.unlink(sick_shares[0][2])
3065 #dead_shares = self.find_shares(self.uris["dead"])
3066 #for i in range(1, 10):
3067 # os.unlink(dead_shares[i][2])
3069 #c_shares = self.find_shares(self.uris["corrupt"])
3070 #cso = CorruptShareOptions()
3071 #cso.stdout = StringIO()
3072 #cso.parseOptions([c_shares[0][2]])
3074 d.addCallback(_clobber_shares)
3077 # root/good CHK, 10 shares
3079 # root/sick CHK, 9 shares
3081 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
3083 units = [simplejson.loads(line)
3084 for line in res.splitlines()
3086 self.failUnlessEqual(len(units), 4+1)
3087 # should be parent-first
3089 self.failUnlessEqual(u0["path"], [])
3090 self.failUnlessEqual(u0["type"], "directory")
3091 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
3092 u0crr = u0["check-and-repair-results"]
3093 self.failUnlessEqual(u0crr["repair-attempted"], False)
3094 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3096 ugood = [u for u in units
3097 if u["type"] == "file" and u["path"] == [u"good"]][0]
3098 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3099 ugoodcrr = ugood["check-and-repair-results"]
3100 self.failUnlessEqual(u0crr["repair-attempted"], False)
3101 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3103 usick = [u for u in units
3104 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3105 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3106 usickcrr = usick["check-and-repair-results"]
3107 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3108 self.failUnlessEqual(usickcrr["repair-successful"], True)
3109 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3110 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3113 self.failUnlessEqual(stats["type"], "stats")
3115 self.failUnlessEqual(s["count-immutable-files"], 2)
3116 self.failUnlessEqual(s["count-literal-files"], 1)
3117 self.failUnlessEqual(s["count-directories"], 1)
3118 d.addCallback(_done)
3120 d.addErrback(self.explain_web_error)
# Helper for the lease tests: collect a (share-filename, lease-count)
# pair for every share of the stored object stashed under 'which'.
# 'ignored' absorbs the previous Deferred callback's result so this can
# sit directly in a callback chain; the collected list is consumed by
# _assert_leasecount.
3123 def _count_leases(self, ignored, which):
3124 u = self.uris[which]
3125 shares = self.find_shares(u)
3127 for shnum, serverid, fn in shares:
3128 sf = get_share_file(fn)
3129 num_leases = len(list(sf.get_leases()))
3130 lease_counts.append( (fn, num_leases) )
3133 def _assert_leasecount(self, lease_counts, expected):
3134 for (fn, num_leases) in lease_counts:
3135 if num_leases != expected:
3136 self.fail("expected %d leases, have %d, on %s" %
3137 (expected, num_leases, fn))
# Verify the "add-lease=true" webapi check option: a check from the
# original client merely renews its existing lease (count stays 1),
# while a check from a second client (different lease secrets) adds a
# new lease (count becomes 2). Covers two immutable files and one
# mutable file.
3139 def test_add_lease(self):
3140 self.basedir = "web/Grid/add_lease"
# two clients so we have two distinct sets of lease secrets
3141 self.set_up_grid(num_clients=2)
3142 c0 = self.g.clients[0]
3145 d = c0.upload(upload.Data(DATA, convergence=""))
# remember each object's URI under a symbolic name for later lookup
3146 def _stash_uri(ur, which):
3147 self.uris[which] = ur.uri
3148 d.addCallback(_stash_uri, "one")
3149 d.addCallback(lambda ign:
3150 c0.upload(upload.Data(DATA+"1", convergence="")))
3151 d.addCallback(_stash_uri, "two")
# mutable files hand back a node, not an UploadResults
3152 def _stash_mutable_uri(n, which):
3153 self.uris[which] = n.get_uri()
3154 assert isinstance(self.uris[which], str)
3155 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3156 d.addCallback(_stash_mutable_uri, "mutable")
# build "uri/<quoted-cap>" webapi URLs for every stashed URI
3158 def _compute_fileurls(ignored):
3160 for which in self.uris:
3161 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3162 d.addCallback(_compute_fileurls)
# baseline: every object starts with exactly one lease per share
3164 d.addCallback(self._count_leases, "one")
3165 d.addCallback(self._assert_leasecount, 1)
3166 d.addCallback(self._count_leases, "two")
3167 d.addCallback(self._assert_leasecount, 1)
3168 d.addCallback(self._count_leases, "mutable")
3169 d.addCallback(self._assert_leasecount, 1)
3171 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3172 def _got_html_good(res):
3173 self.failUnless("Healthy" in res, res)
3174 self.failIf("Not Healthy" in res, res)
3175 d.addCallback(_got_html_good)
# a plain check must not change any lease counts
3177 d.addCallback(self._count_leases, "one")
3178 d.addCallback(self._assert_leasecount, 1)
3179 d.addCallback(self._count_leases, "two")
3180 d.addCallback(self._assert_leasecount, 1)
3181 d.addCallback(self._count_leases, "mutable")
3182 d.addCallback(self._assert_leasecount, 1)
3184 # this CHECK uses the original client, which uses the same
3185 # lease-secrets, so it will just renew the original lease
3186 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3187 d.addCallback(_got_html_good)
3189 d.addCallback(self._count_leases, "one")
3190 d.addCallback(self._assert_leasecount, 1)
3191 d.addCallback(self._count_leases, "two")
3192 d.addCallback(self._assert_leasecount, 1)
3193 d.addCallback(self._count_leases, "mutable")
3194 d.addCallback(self._assert_leasecount, 1)
3196 # this CHECK uses an alternate client, which adds a second lease
3197 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3198 d.addCallback(_got_html_good)
# only "one" gains a lease; the untouched objects stay at 1
3200 d.addCallback(self._count_leases, "one")
3201 d.addCallback(self._assert_leasecount, 2)
3202 d.addCallback(self._count_leases, "two")
3203 d.addCallback(self._assert_leasecount, 1)
3204 d.addCallback(self._count_leases, "mutable")
3205 d.addCallback(self._assert_leasecount, 1)
# same pattern for the mutable file: original client renews...
3207 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3208 d.addCallback(_got_html_good)
3210 d.addCallback(self._count_leases, "one")
3211 d.addCallback(self._assert_leasecount, 2)
3212 d.addCallback(self._count_leases, "two")
3213 d.addCallback(self._assert_leasecount, 1)
3214 d.addCallback(self._count_leases, "mutable")
3215 d.addCallback(self._assert_leasecount, 1)
# ...and the alternate client adds a second lease
3217 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3219 d.addCallback(_got_html_good)
3221 d.addCallback(self._count_leases, "one")
3222 d.addCallback(self._assert_leasecount, 2)
3223 d.addCallback(self._count_leases, "two")
3224 d.addCallback(self._assert_leasecount, 1)
3225 d.addCallback(self._count_leases, "mutable")
3226 d.addCallback(self._assert_leasecount, 2)
3228 d.addErrback(self.explain_web_error)
# Verify "add-lease=true" on the streaming deep-check operation: a
# deep-check from the original client leaves every lease count at 1
# (it only renews), while one from a second client (different lease
# secrets) adds a lease to every object in the tree.
3231 def test_deep_add_lease(self):
3232 self.basedir = "web/Grid/deep_add_lease"
# two clients so we have two distinct sets of lease secrets
3233 self.set_up_grid(num_clients=2)
3234 c0 = self.g.clients[0]
3238 d = c0.create_empty_dirnode()
3239 def _stash_root_and_create_file(n):
3241 self.uris["root"] = n.get_uri()
3242 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3243 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3244 d.addCallback(_stash_root_and_create_file)
# remember each child's URI under a symbolic name for later lookup
3245 def _stash_uri(fn, which):
3246 self.uris[which] = fn.get_uri()
3247 d.addCallback(_stash_uri, "one")
3248 d.addCallback(lambda ign:
3249 self.rootnode.add_file(u"small",
3250 upload.Data("literal",
3252 d.addCallback(_stash_uri, "small")
3254 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3255 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3256 d.addCallback(_stash_uri, "mutable")
3258 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
3260 units = [simplejson.loads(line)
3261 for line in res.splitlines()
3263 # root, one, small, mutable, stats
3264 self.failUnlessEqual(len(units), 4+1)
3265 d.addCallback(_done)
# baseline: one lease per share everywhere
3267 d.addCallback(self._count_leases, "root")
3268 d.addCallback(self._assert_leasecount, 1)
3269 d.addCallback(self._count_leases, "one")
3270 d.addCallback(self._assert_leasecount, 1)
3271 d.addCallback(self._count_leases, "mutable")
3272 d.addCallback(self._assert_leasecount, 1)
# deep-check with add-lease from the same client: only renews, so
# counts must stay at 1
3274 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3275 d.addCallback(_done)
3277 d.addCallback(self._count_leases, "root")
3278 d.addCallback(self._assert_leasecount, 1)
3279 d.addCallback(self._count_leases, "one")
3280 d.addCallback(self._assert_leasecount, 1)
3281 d.addCallback(self._count_leases, "mutable")
3282 d.addCallback(self._assert_leasecount, 1)
# deep-check with add-lease from the alternate client: every object
# in the tree gains a second lease
3284 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3286 d.addCallback(_done)
3288 d.addCallback(self._count_leases, "root")
3289 d.addCallback(self._assert_leasecount, 2)
3290 d.addCallback(self._count_leases, "one")
3291 d.addCallback(self._assert_leasecount, 2)
3292 d.addCallback(self._count_leases, "mutable")
3293 d.addCallback(self._assert_leasecount, 2)
3295 d.addErrback(self.explain_web_error)
# Verify the webapi's error reporting for a range of failure modes:
# unrecoverable files (0 shares and 1-of-3 shares) must yield
# "410 Gone" with a specific text/plain explanation; a missing child
# gives 404; unrecoverable directories still render their HTML page
# (with an embedded error instead of the child table); and an
# unexpected internal exception produces a 500 whose body is an HTML
# or text/plain traceback depending on the Accept header.
3299 def test_exceptions(self):
3300 self.basedir = "web/Grid/exceptions"
3301 self.set_up_grid(num_clients=1, num_servers=2)
3302 c0 = self.g.clients[0]
3305 d = c0.create_empty_dirnode()
3307 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3308 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3310 d.addCallback(_stash_root)
3311 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# keep only share #0 of this upload: 1 share where 3 are needed
3313 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3314 self.delete_shares_numbered(ur.uri, range(1,10))
# flip a bit in the readkey to forge a URI for which no shares exist
3316 u = uri.from_string(ur.uri)
3317 u.key = testutil.flip_bit(u.key, 0)
3318 baduri = u.to_string()
3319 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3320 d.addCallback(_stash_bad)
# a directory reduced to a single share (unrecoverable)
3321 d.addCallback(lambda ign: c0.create_empty_dirnode())
3322 def _mangle_dirnode_1share(n):
3324 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3325 self.fileurls["dir-1share-json"] = url + "?t=json"
3326 self.delete_shares_numbered(u, range(1,10))
3327 d.addCallback(_mangle_dirnode_1share)
# a directory with every share deleted
3328 d.addCallback(lambda ign: c0.create_empty_dirnode())
3329 def _mangle_dirnode_0share(n):
3331 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3332 self.fileurls["dir-0share-json"] = url + "?t=json"
3333 self.delete_shares_numbered(u, range(0,10))
3334 d.addCallback(_mangle_dirnode_0share)
3336 # NotEnoughSharesError should be reported sensibly, with a
3337 # text/plain explanation of the problem, and perhaps some
3338 # information on which shares *could* be found.
3340 d.addCallback(lambda ignored:
3341 self.shouldHTTPError("GET unrecoverable",
3342 410, "Gone", "NoSharesError",
3343 self.GET, self.fileurls["0shares"]))
# body must be plain text matching the canonical NoSharesError
# message exactly (whitespace-normalized before comparing)
3344 def _check_zero_shares(body):
3345 self.failIf("<html>" in body, body)
3346 body = " ".join(body.strip().split())
3347 exp = ("NoSharesError: no shares could be found. "
3348 "Zero shares usually indicates a corrupt URI, or that "
3349 "no servers were connected, but it might also indicate "
3350 "severe corruption. You should perform a filecheck on "
3351 "this object to learn more. The full error message is: "
3352 "Failed to get enough shareholders: have 0, need 3")
3353 self.failUnlessEqual(exp, body)
3354 d.addCallback(_check_zero_shares)
3357 d.addCallback(lambda ignored:
3358 self.shouldHTTPError("GET 1share",
3359 410, "Gone", "NotEnoughSharesError",
3360 self.GET, self.fileurls["1share"]))
3361 def _check_one_share(body):
3362 self.failIf("<html>" in body, body)
3363 body = " ".join(body.strip().split())
3364 exp = ("NotEnoughSharesError: This indicates that some "
3365 "servers were unavailable, or that shares have been "
3366 "lost to server departure, hard drive failure, or disk "
3367 "corruption. You should perform a filecheck on "
3368 "this object to learn more. The full error message is:"
3369 " Failed to get enough shareholders: have 1, need 3")
3370 self.failUnlessEqual(exp, body)
3371 d.addCallback(_check_one_share)
# a child name that was never linked must give a 404
3373 d.addCallback(lambda ignored:
3374 self.shouldHTTPError("GET imaginary",
3375 404, "Not Found", None,
3376 self.GET, self.fileurls["imaginary"]))
3377 def _missing_child(body):
3378 self.failUnless("No such child: imaginary" in body, body)
3379 d.addCallback(_missing_child)
3381 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3382 def _check_0shares_dir_html(body):
3383 self.failUnless("<html>" in body, body)
3384 # we should see the regular page, but without the child table or
3386 body = " ".join(body.strip().split())
3387 self.failUnlessIn('href="?t=info">More info on this directory',
3389 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3390 "could not be retrieved, because there were insufficient "
3391 "good shares. This might indicate that no servers were "
3392 "connected, insufficient servers were connected, the URI "
3393 "was corrupt, or that shares have been lost due to server "
3394 "departure, hard drive failure, or disk corruption. You "
3395 "should perform a filecheck on this object to learn more.")
3396 self.failUnlessIn(exp, body)
3397 self.failUnlessIn("No upload forms: directory is unreadable", body)
3398 d.addCallback(_check_0shares_dir_html)
3400 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3401 def _check_1shares_dir_html(body):
3402 # at some point, we'll split UnrecoverableFileError into 0-shares
3403 # and some-shares like we did for immutable files (since there
3404 # are different sorts of advice to offer in each case). For now,
3405 # they present the same way.
3406 self.failUnless("<html>" in body, body)
3407 body = " ".join(body.strip().split())
3408 self.failUnlessIn('href="?t=info">More info on this directory',
3410 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3411 "could not be retrieved, because there were insufficient "
3412 "good shares. This might indicate that no servers were "
3413 "connected, insufficient servers were connected, the URI "
3414 "was corrupt, or that shares have been lost due to server "
3415 "departure, hard drive failure, or disk corruption. You "
3416 "should perform a filecheck on this object to learn more.")
3417 self.failUnlessIn(exp, body)
3418 self.failUnlessIn("No upload forms: directory is unreadable", body)
3419 d.addCallback(_check_1shares_dir_html)
# the t=json views of the broken directories must 410 with a plain
# text body instead of rendering a page
3421 d.addCallback(lambda ignored:
3422 self.shouldHTTPError("GET dir-0share-json",
3423 410, "Gone", "UnrecoverableFileError",
3425 self.fileurls["dir-0share-json"]))
3426 def _check_unrecoverable_file(body):
3427 self.failIf("<html>" in body, body)
3428 body = " ".join(body.strip().split())
3429 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3430 "could not be retrieved, because there were insufficient "
3431 "good shares. This might indicate that no servers were "
3432 "connected, insufficient servers were connected, the URI "
3433 "was corrupt, or that shares have been lost due to server "
3434 "departure, hard drive failure, or disk corruption. You "
3435 "should perform a filecheck on this object to learn more.")
3436 self.failUnlessEqual(exp, body)
3437 d.addCallback(_check_unrecoverable_file)
3439 d.addCallback(lambda ignored:
3440 self.shouldHTTPError("GET dir-1share-json",
3441 410, "Gone", "UnrecoverableFileError",
3443 self.fileurls["dir-1share-json"]))
3444 d.addCallback(_check_unrecoverable_file)
3446 d.addCallback(lambda ignored:
3447 self.shouldHTTPError("GET imaginary",
3448 404, "Not Found", None,
3449 self.GET, self.fileurls["imaginary"]))
3451 # attach a webapi child that throws a random error, to test how it
3453 w = c0.getServiceNamed("webish")
3454 w.root.putChild("ERRORBOOM", ErrorBoom())
3456 d.addCallback(lambda ignored:
3457 self.shouldHTTPError("GET errorboom_html",
3458 500, "Internal Server Error", None,
3459 self.GET, "ERRORBOOM"))
3460 def _internal_error_html(body):
3461 # test that a weird exception during a webapi operation with
3462 # Accept:*/* results in a text/html stack trace, while one
3463 # without that Accept: line gets us a text/plain stack trace
3464 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3465 d.addCallback(_internal_error_html)
3467 d.addCallback(lambda ignored:
3468 self.shouldHTTPError("GET errorboom_text",
3469 500, "Internal Server Error", None,
3470 self.GET, "ERRORBOOM",
3471 headers={"accept": ["text/plain"]}))
3472 def _internal_error_text(body):
3473 # test that a weird exception during a webapi operation with
3474 # Accept:*/* results in a text/html stack trace, while one
3475 # without that Accept: line gets us a text/plain stack trace
3476 self.failIf("<html>" in body, body)
3477 self.failUnless(body.startswith("Traceback "), body)
3478 d.addCallback(_internal_error_text)
3480 def _flush_errors(res):
3481 # Trial: please ignore the CompletelyUnhandledError in the logs
3482 self.flushLoggedErrors(CompletelyUnhandledError)
3484 d.addBoth(_flush_errors)
# An exception type that no webapi code knows about, used by the
# ErrorBoom resource to simulate an unexpected internal error.
3488 class CompletelyUnhandledError(Exception):
# Minimal webapi resource that always blows up before rendering.
# test_exceptions mounts it at /ERRORBOOM to verify that the webapi
# turns unexpected internal exceptions into 500 responses (HTML or
# text/plain traceback, depending on the request's Accept header).
3490 class ErrorBoom(rend.Page):
# beforeRender is a nevow rend.Page hook invoked before page render —
# raising here aborts the render with our deliberately-unknown error
3491 def beforeRender(self, ctx):
3492 raise CompletelyUnhandledError("whoops")