1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.immutable import upload, download
13 from allmydata.web import status, common
14 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
15 from allmydata.util import fileutil, base32
16 from allmydata.util.assertutil import precondition
17 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
18 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
19 from allmydata.interfaces import IURI, INewDirectoryURI, \
20 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
21 from allmydata.mutable import servermap, publish, retrieve
22 import common_util as testutil
23 from allmydata.test.no_network import GridTestMixin
25 from allmydata.test.common_web import HTTPClientGETFactory, \
28 # create a fake uploader/downloader, and a couple of fake dirnodes, then
29 # create a webserver that works against them
31 class FakeIntroducerClient:
32 def get_all_connectors(self):
34 def get_all_connections_for(self, service_name):
36 def get_all_peerids(self):
39 class FakeStatsProvider:
41 stats = {'stats': {}, 'counters': {}}
44 class FakeClient(service.MultiService):
45 nodeid = "fake_nodeid"
46 nickname = "fake_nickname"
47 basedir = "fake_basedir"
48 def get_versions(self):
49 return {'allmydata': "fake",
54 introducer_furl = "None"
55 introducer_client = FakeIntroducerClient()
56 _all_upload_status = [upload.UploadStatus()]
57 _all_download_status = [download.DownloadStatus()]
58 _all_mapupdate_statuses = [servermap.UpdateStatus()]
59 _all_publish_statuses = [publish.PublishStatus()]
60 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
61 convergence = "some random string"
62 stats_provider = FakeStatsProvider()
64 def connected_to_introducer(self):
67 def get_nickname_for_peerid(self, peerid):
70 def get_permuted_peers(self, service_name, key):
73 def create_node_from_uri(self, auri):
74 precondition(isinstance(auri, str), auri)
75 u = uri.from_string(auri)
76 if (INewDirectoryURI.providedBy(u)
77 or IReadonlyNewDirectoryURI.providedBy(u)):
78 return FakeDirectoryNode(self).init_from_uri(u)
79 if IFileURI.providedBy(u):
80 return FakeCHKFileNode(u, self)
81 assert IMutableFileURI.providedBy(u), u
82 return FakeMutableFileNode(self).init_from_uri(u)
84 def create_empty_dirnode(self):
85 n = FakeDirectoryNode(self)
87 d.addCallback(lambda res: n)
90 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
91 def create_mutable_file(self, contents=""):
92 n = FakeMutableFileNode(self)
93 return n.create(contents)
95 def upload(self, uploadable):
96 d = uploadable.get_size()
97 d.addCallback(lambda size: uploadable.read(size))
100 n = create_chk_filenode(self, data)
101 results = upload.UploadResults()
102 results.uri = n.get_uri()
104 d.addCallback(_got_data)
    def list_all_upload_statuses(self):
        """Return the canned list of UploadStatus objects (class attribute)."""
        return self._all_upload_status
    def list_all_download_statuses(self):
        """Return the canned list of DownloadStatus objects (class attribute)."""
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        """Return the canned list of servermap UpdateStatus objects."""
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        """Return the canned list of PublishStatus objects."""
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        """Return the canned list of RetrieveStatus objects."""
        return self._all_retrieve_statuses
117 def list_all_helper_statuses(self):
120 class WebMixin(object):
122 self.s = FakeClient()
123 self.s.startService()
124 self.staticdir = self.mktemp()
125 self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
126 s.setServiceParent(self.s)
127 self.webish_port = port = s.listener._port.getHost().port
128 self.webish_url = "http://localhost:%d" % port
130 l = [ self.s.create_empty_dirnode() for x in range(6) ]
131 d = defer.DeferredList(l)
133 self.public_root = res[0][1]
134 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
135 self.public_url = "/uri/" + self.public_root.get_uri()
136 self.private_root = res[1][1]
140 self._foo_uri = foo.get_uri()
141 self._foo_readonly_uri = foo.get_readonly_uri()
142 self._foo_verifycap = foo.get_verify_cap().to_string()
143 # NOTE: we ignore the deferred on all set_uri() calls, because we
144 # know the fake nodes do these synchronously
145 self.public_root.set_uri(u"foo", foo.get_uri())
147 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
148 foo.set_uri(u"bar.txt", self._bar_txt_uri)
149 self._bar_txt_verifycap = n.get_verify_cap().to_string()
151 foo.set_uri(u"empty", res[3][1].get_uri())
152 sub_uri = res[4][1].get_uri()
153 self._sub_uri = sub_uri
154 foo.set_uri(u"sub", sub_uri)
155 sub = self.s.create_node_from_uri(sub_uri)
157 _ign, n, blocking_uri = self.makefile(1)
158 foo.set_uri(u"blockingfile", blocking_uri)
160 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
161 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
162 # still think of it as an umlaut
163 foo.set_uri(unicode_filename, self._bar_txt_uri)
165 _ign, n, baz_file = self.makefile(2)
166 self._baz_file_uri = baz_file
167 sub.set_uri(u"baz.txt", baz_file)
169 _ign, n, self._bad_file_uri = self.makefile(3)
170 # this uri should not be downloadable
171 del FakeCHKFileNode.all_contents[self._bad_file_uri]
174 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
175 rodir.set_uri(u"nor", baz_file)
180 # public/foo/blockingfile
183 # public/foo/sub/baz.txt
185 # public/reedownlee/nor
186 self.NEWFILE_CONTENTS = "newfile contents\n"
188 return foo.get_metadata_for(u"bar.txt")
190 def _got_metadata(metadata):
191 self._bar_txt_metadata = metadata
192 d.addCallback(_got_metadata)
195 def makefile(self, number):
196 contents = "contents of file %s\n" % number
197 n = create_chk_filenode(self.s, contents)
198 return contents, n, n.get_uri()
201 return self.s.stopService()
    def failUnlessIsBarDotTxt(self, res):
        """Assert that a downloaded body matches the canned bar.txt contents."""
        self.failUnlessEqual(res, self.BAR_CONTENTS, res)
206 def failUnlessIsBarJSON(self, res):
207 data = simplejson.loads(res)
208 self.failUnless(isinstance(data, list))
209 self.failUnlessEqual(data[0], u"filenode")
210 self.failUnless(isinstance(data[1], dict))
211 self.failIf(data[1]["mutable"])
212 self.failIf("rw_uri" in data[1]) # immutable
213 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
214 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
215 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
217 def failUnlessIsFooJSON(self, res):
218 data = simplejson.loads(res)
219 self.failUnless(isinstance(data, list))
220 self.failUnlessEqual(data[0], "dirnode", res)
221 self.failUnless(isinstance(data[1], dict))
222 self.failUnless(data[1]["mutable"])
223 self.failUnless("rw_uri" in data[1]) # mutable
224 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
225 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
226 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
228 kidnames = sorted([unicode(n) for n in data[1]["children"]])
229 self.failUnlessEqual(kidnames,
230 [u"bar.txt", u"blockingfile", u"empty",
231 u"n\u00fc.txt", u"sub"])
232 kids = dict( [(unicode(name),value)
234 in data[1]["children"].iteritems()] )
235 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
236 self.failUnless("metadata" in kids[u"sub"][1])
237 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
238 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
239 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
240 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
241 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
242 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
243 self._bar_txt_verifycap)
244 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
245 self._bar_txt_metadata["ctime"])
246 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
249 def GET(self, urlpath, followRedirect=False, return_response=False,
251 # if return_response=True, this fires with (data, statuscode,
252 # respheaders) instead of just data.
253 assert not isinstance(urlpath, unicode)
254 url = self.webish_url + urlpath
255 factory = HTTPClientGETFactory(url, method="GET",
256 followRedirect=followRedirect, **kwargs)
257 reactor.connectTCP("localhost", self.webish_port, factory)
260 return (data, factory.status, factory.response_headers)
262 d.addCallback(_got_data)
263 return factory.deferred
265 def HEAD(self, urlpath, return_response=False, **kwargs):
266 # this requires some surgery, because twisted.web.client doesn't want
267 # to give us back the response headers.
268 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
269 reactor.connectTCP("localhost", self.webish_port, factory)
272 return (data, factory.status, factory.response_headers)
274 d.addCallback(_got_data)
275 return factory.deferred
277 def PUT(self, urlpath, data, **kwargs):
278 url = self.webish_url + urlpath
279 return client.getPage(url, method="PUT", postdata=data, **kwargs)
281 def DELETE(self, urlpath):
282 url = self.webish_url + urlpath
283 return client.getPage(url, method="DELETE")
285 def POST(self, urlpath, followRedirect=False, **fields):
286 url = self.webish_url + urlpath
287 sepbase = "boogabooga"
291 form.append('Content-Disposition: form-data; name="_charset"')
295 for name, value in fields.iteritems():
296 if isinstance(value, tuple):
297 filename, value = value
298 form.append('Content-Disposition: form-data; name="%s"; '
299 'filename="%s"' % (name, filename.encode("utf-8")))
301 form.append('Content-Disposition: form-data; name="%s"' % name)
303 if isinstance(value, unicode):
304 value = value.encode("utf-8")
307 assert isinstance(value, str)
311 body = "\r\n".join(form) + "\r\n"
312 headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
314 return client.getPage(url, method="POST", postdata=body,
315 headers=headers, followRedirect=followRedirect)
317 def shouldFail(self, res, expected_failure, which,
318 substring=None, response_substring=None):
319 if isinstance(res, failure.Failure):
320 res.trap(expected_failure)
322 self.failUnless(substring in str(res),
323 "substring '%s' not in '%s'"
324 % (substring, str(res)))
325 if response_substring:
326 self.failUnless(response_substring in res.value.response,
327 "response substring '%s' not in '%s'"
328 % (response_substring, res.value.response))
330 self.fail("%s was supposed to raise %s, not get '%s'" %
331 (which, expected_failure, res))
333 def shouldFail2(self, expected_failure, which, substring,
335 callable, *args, **kwargs):
336 assert substring is None or isinstance(substring, str)
337 assert response_substring is None or isinstance(response_substring, str)
338 d = defer.maybeDeferred(callable, *args, **kwargs)
340 if isinstance(res, failure.Failure):
341 res.trap(expected_failure)
343 self.failUnless(substring in str(res),
344 "%s: substring '%s' not in '%s'"
345 % (which, substring, str(res)))
346 if response_substring:
347 self.failUnless(response_substring in res.value.response,
348 "%s: response substring '%s' not in '%s'"
350 response_substring, res.value.response))
352 self.fail("%s was supposed to raise %s, not get '%s'" %
353 (which, expected_failure, res))
357 def should404(self, res, which):
358 if isinstance(res, failure.Failure):
359 res.trap(error.Error)
360 self.failUnlessEqual(res.value.status, "404")
362 self.fail("%s was supposed to Error(404), not get '%s'" %
366 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
367 def test_create(self):
370 def test_welcome(self):
373 self.failUnless('Welcome To AllMyData' in res)
374 self.failUnless('Tahoe' in res)
376 self.s.basedir = 'web/test_welcome'
377 fileutil.make_dirs("web/test_welcome")
378 fileutil.make_dirs("web/test_welcome/private")
380 d.addCallback(_check)
383 def test_provisioning(self):
384 d = self.GET("/provisioning/")
386 self.failUnless('Tahoe Provisioning Tool' in res)
387 fields = {'filled': True,
388 "num_users": int(50e3),
389 "files_per_user": 1000,
390 "space_per_user": int(1e9),
391 "sharing_ratio": 1.0,
392 "encoding_parameters": "3-of-10-5",
394 "ownership_mode": "A",
395 "download_rate": 100,
400 return self.POST("/provisioning/", **fields)
402 d.addCallback(_check)
404 self.failUnless('Tahoe Provisioning Tool' in res)
405 self.failUnless("Share space consumed: 167.01TB" in res)
407 fields = {'filled': True,
408 "num_users": int(50e6),
409 "files_per_user": 1000,
410 "space_per_user": int(5e9),
411 "sharing_ratio": 1.0,
412 "encoding_parameters": "25-of-100-50",
413 "num_servers": 30000,
414 "ownership_mode": "E",
415 "drive_failure_model": "U",
417 "download_rate": 1000,
422 return self.POST("/provisioning/", **fields)
423 d.addCallback(_check2)
425 self.failUnless("Share space consumed: huge!" in res)
426 fields = {'filled': True}
427 return self.POST("/provisioning/", **fields)
428 d.addCallback(_check3)
430 self.failUnless("Share space consumed:" in res)
431 d.addCallback(_check4)
434 def test_reliability_tool(self):
436 from allmydata import reliability
437 _hush_pyflakes = reliability
439 raise unittest.SkipTest("reliability tool requires NumPy")
441 d = self.GET("/reliability/")
443 self.failUnless('Tahoe Reliability Tool' in res)
444 fields = {'drive_lifetime': "8Y",
449 "check_period": "1M",
450 "report_period": "3M",
453 return self.POST("/reliability/", **fields)
455 d.addCallback(_check)
457 self.failUnless('Tahoe Reliability Tool' in res)
458 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
459 self.failUnless(re.search(r, res), res)
460 d.addCallback(_check2)
463 def test_status(self):
464 dl_num = self.s.list_all_download_statuses()[0].get_counter()
465 ul_num = self.s.list_all_upload_statuses()[0].get_counter()
466 mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
467 pub_num = self.s.list_all_publish_statuses()[0].get_counter()
468 ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
469 d = self.GET("/status", followRedirect=True)
471 self.failUnless('Upload and Download Status' in res, res)
472 self.failUnless('"down-%d"' % dl_num in res, res)
473 self.failUnless('"up-%d"' % ul_num in res, res)
474 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
475 self.failUnless('"publish-%d"' % pub_num in res, res)
476 self.failUnless('"retrieve-%d"' % ret_num in res, res)
477 d.addCallback(_check)
478 d.addCallback(lambda res: self.GET("/status/?t=json"))
479 def _check_json(res):
480 data = simplejson.loads(res)
481 self.failUnless(isinstance(data, dict))
482 active = data["active"]
483 # TODO: test more. We need a way to fake an active operation
485 d.addCallback(_check_json)
487 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
489 self.failUnless("File Download Status" in res, res)
490 d.addCallback(_check_dl)
491 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
493 self.failUnless("File Upload Status" in res, res)
494 d.addCallback(_check_ul)
495 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
496 def _check_mapupdate(res):
497 self.failUnless("Mutable File Servermap Update Status" in res, res)
498 d.addCallback(_check_mapupdate)
499 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
500 def _check_publish(res):
501 self.failUnless("Mutable File Publish Status" in res, res)
502 d.addCallback(_check_publish)
503 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
504 def _check_retrieve(res):
505 self.failUnless("Mutable File Retrieve Status" in res, res)
506 d.addCallback(_check_retrieve)
510 def test_status_numbers(self):
511 drrm = status.DownloadResultsRendererMixin()
512 self.failUnlessEqual(drrm.render_time(None, None), "")
513 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
514 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
515 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
516 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
517 self.failUnlessEqual(drrm.render_rate(None, None), "")
518 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
519 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
520 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
522 urrm = status.UploadResultsRendererMixin()
523 self.failUnlessEqual(urrm.render_time(None, None), "")
524 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
525 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
526 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
527 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
528 self.failUnlessEqual(urrm.render_rate(None, None), "")
529 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
530 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
531 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
533 def test_GET_FILEURL(self):
534 d = self.GET(self.public_url + "/foo/bar.txt")
535 d.addCallback(self.failUnlessIsBarDotTxt)
538 def test_GET_FILEURL_range(self):
539 headers = {"range": "bytes=1-10"}
540 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
541 return_response=True)
542 def _got((res, status, headers)):
543 self.failUnlessEqual(int(status), 206)
544 self.failUnless(headers.has_key("content-range"))
545 self.failUnlessEqual(headers["content-range"][0],
546 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
547 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
551 def test_GET_FILEURL_partial_range(self):
552 headers = {"range": "bytes=5-"}
553 length = len(self.BAR_CONTENTS)
554 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
555 return_response=True)
556 def _got((res, status, headers)):
557 self.failUnlessEqual(int(status), 206)
558 self.failUnless(headers.has_key("content-range"))
559 self.failUnlessEqual(headers["content-range"][0],
560 "bytes 5-%d/%d" % (length-1, length))
561 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
565 def test_HEAD_FILEURL_range(self):
566 headers = {"range": "bytes=1-10"}
567 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
568 return_response=True)
569 def _got((res, status, headers)):
570 self.failUnlessEqual(res, "")
571 self.failUnlessEqual(int(status), 206)
572 self.failUnless(headers.has_key("content-range"))
573 self.failUnlessEqual(headers["content-range"][0],
574 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
578 def test_HEAD_FILEURL_partial_range(self):
579 headers = {"range": "bytes=5-"}
580 length = len(self.BAR_CONTENTS)
581 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
582 return_response=True)
583 def _got((res, status, headers)):
584 self.failUnlessEqual(int(status), 206)
585 self.failUnless(headers.has_key("content-range"))
586 self.failUnlessEqual(headers["content-range"][0],
587 "bytes 5-%d/%d" % (length-1, length))
591 def test_GET_FILEURL_range_bad(self):
592 headers = {"range": "BOGUS=fizbop-quarnak"}
593 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
595 "Syntactically invalid http range header",
596 self.GET, self.public_url + "/foo/bar.txt",
600 def test_HEAD_FILEURL(self):
601 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
602 def _got((res, status, headers)):
603 self.failUnlessEqual(res, "")
604 self.failUnlessEqual(headers["content-length"][0],
605 str(len(self.BAR_CONTENTS)))
606 self.failUnlessEqual(headers["content-type"], ["text/plain"])
610 def test_GET_FILEURL_named(self):
611 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
612 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
613 d = self.GET(base + "/@@name=/blah.txt")
614 d.addCallback(self.failUnlessIsBarDotTxt)
615 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
616 d.addCallback(self.failUnlessIsBarDotTxt)
617 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
618 d.addCallback(self.failUnlessIsBarDotTxt)
619 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
620 d.addCallback(self.failUnlessIsBarDotTxt)
621 save_url = base + "?save=true&filename=blah.txt"
622 d.addCallback(lambda res: self.GET(save_url))
623 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
624 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
625 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
626 u_url = base + "?save=true&filename=" + u_fn_e
627 d.addCallback(lambda res: self.GET(u_url))
628 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
631 def test_PUT_FILEURL_named_bad(self):
632 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
633 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
635 "/file can only be used with GET or HEAD",
636 self.PUT, base + "/@@name=/blah.txt", "")
639 def test_GET_DIRURL_named_bad(self):
640 base = "/file/%s" % urllib.quote(self._foo_uri)
641 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
644 self.GET, base + "/@@name=/blah.txt")
647 def test_GET_slash_file_bad(self):
648 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
650 "/file must be followed by a file-cap and a name",
654 def test_GET_unhandled_URI_named(self):
655 contents, n, newuri = self.makefile(12)
656 verifier_cap = n.get_verify_cap().to_string()
657 base = "/file/%s" % urllib.quote(verifier_cap)
658 # client.create_node_from_uri() can't handle verify-caps
659 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
661 "is not a valid file- or directory- cap",
665 def test_GET_unhandled_URI(self):
666 contents, n, newuri = self.makefile(12)
667 verifier_cap = n.get_verify_cap().to_string()
668 base = "/uri/%s" % urllib.quote(verifier_cap)
669 # client.create_node_from_uri() can't handle verify-caps
670 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
672 "is not a valid file- or directory- cap",
676 def test_GET_FILE_URI(self):
677 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
679 d.addCallback(self.failUnlessIsBarDotTxt)
682 def test_GET_FILE_URI_badchild(self):
683 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
684 errmsg = "Files have no children, certainly not named 'boguschild'"
685 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
686 "400 Bad Request", errmsg,
690 def test_PUT_FILE_URI_badchild(self):
691 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
692 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
693 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
694 "400 Bad Request", errmsg,
698 def test_GET_FILEURL_save(self):
699 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
700 # TODO: look at the headers, expect a Content-Disposition: attachment
702 d.addCallback(self.failUnlessIsBarDotTxt)
705 def test_GET_FILEURL_missing(self):
706 d = self.GET(self.public_url + "/foo/missing")
707 d.addBoth(self.should404, "test_GET_FILEURL_missing")
710 def test_PUT_NEWFILEURL(self):
711 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
712 # TODO: we lose the response code, so we can't check this
713 #self.failUnlessEqual(responsecode, 201)
714 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
715 d.addCallback(lambda res:
716 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
717 self.NEWFILE_CONTENTS))
720 def test_PUT_NEWFILEURL_range_bad(self):
721 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
722 target = self.public_url + "/foo/new.txt"
723 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
724 "501 Not Implemented",
725 "Content-Range in PUT not yet supported",
726 # (and certainly not for immutable files)
727 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
729 d.addCallback(lambda res:
730 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
733 def test_PUT_NEWFILEURL_mutable(self):
734 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
735 self.NEWFILE_CONTENTS)
736 # TODO: we lose the response code, so we can't check this
737 #self.failUnlessEqual(responsecode, 201)
739 u = uri.from_string_mutable_filenode(res)
740 self.failUnless(u.is_mutable())
741 self.failIf(u.is_readonly())
743 d.addCallback(_check_uri)
744 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
745 d.addCallback(lambda res:
746 self.failUnlessMutableChildContentsAre(self._foo_node,
748 self.NEWFILE_CONTENTS))
751 def test_PUT_NEWFILEURL_mutable_toobig(self):
752 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
753 "413 Request Entity Too Large",
754 "SDMF is limited to one segment, and 10001 > 10000",
756 self.public_url + "/foo/new.txt?mutable=true",
757 "b" * (self.s.MUTABLE_SIZELIMIT+1))
760 def test_PUT_NEWFILEURL_replace(self):
761 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
762 # TODO: we lose the response code, so we can't check this
763 #self.failUnlessEqual(responsecode, 200)
764 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
765 d.addCallback(lambda res:
766 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
767 self.NEWFILE_CONTENTS))
770 def test_PUT_NEWFILEURL_bad_t(self):
771 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
772 "PUT to a file: bad t=bogus",
773 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
777 def test_PUT_NEWFILEURL_no_replace(self):
778 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
779 self.NEWFILE_CONTENTS)
780 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
782 "There was already a child by that name, and you asked me "
786 def test_PUT_NEWFILEURL_mkdirs(self):
787 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
789 d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
790 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
791 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
792 d.addCallback(lambda res:
793 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
794 self.NEWFILE_CONTENTS))
797 def test_PUT_NEWFILEURL_blocked(self):
798 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
799 self.NEWFILE_CONTENTS)
800 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
802 "Unable to create directory 'blockingfile': a file was in the way")
805 def test_DELETE_FILEURL(self):
806 d = self.DELETE(self.public_url + "/foo/bar.txt")
807 d.addCallback(lambda res:
808 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
811 def test_DELETE_FILEURL_missing(self):
812 d = self.DELETE(self.public_url + "/foo/missing")
813 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
816 def test_DELETE_FILEURL_missing2(self):
817 d = self.DELETE(self.public_url + "/missing/missing")
818 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
821 def test_GET_FILEURL_json(self):
822 # twisted.web.http.parse_qs ignores any query args without an '=', so
823 # I can't do "GET /path?json", I have to do "GET /path/t=json"
824 # instead. This may make it tricky to emulate the S3 interface
826 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
827 d.addCallback(self.failUnlessIsBarJSON)
830 def test_GET_FILEURL_json_missing(self):
831 d = self.GET(self.public_url + "/foo/missing?json")
832 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
835 def test_GET_FILEURL_uri(self):
836 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
838 self.failUnlessEqual(res, self._bar_txt_uri)
839 d.addCallback(_check)
840 d.addCallback(lambda res:
841 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
843 # for now, for files, uris and readonly-uris are the same
844 self.failUnlessEqual(res, self._bar_txt_uri)
845 d.addCallback(_check2)
848 def test_GET_FILEURL_badtype(self):
849 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
852 self.public_url + "/foo/bar.txt?t=bogus")
855 def test_GET_FILEURL_uri_missing(self):
856 d = self.GET(self.public_url + "/foo/missing?t=uri")
857 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
860 def test_GET_DIRURL(self):
861 # the addSlash means we get a redirect here
862 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
864 d = self.GET(self.public_url + "/foo", followRedirect=True)
866 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
868 # the FILE reference points to a URI, but it should end in bar.txt
869 bar_url = ("%s/file/%s/@@named=/bar.txt" %
870 (ROOT, urllib.quote(self._bar_txt_uri)))
871 get_bar = "".join([r'<td>',
872 r'<a href="%s">bar.txt</a>' % bar_url,
875 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
877 self.failUnless(re.search(get_bar, res), res)
878 for line in res.split("\n"):
879 # find the line that contains the delete button for bar.txt
880 if ("form action" in line and
881 'value="delete"' in line and
882 'value="bar.txt"' in line):
883 # the form target should use a relative URL
884 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
885 self.failUnless(('action="%s"' % foo_url) in line, line)
886 # and the when_done= should too
887 #done_url = urllib.quote(???)
888 #self.failUnless(('name="when_done" value="%s"' % done_url)
892 self.fail("unable to find delete-bar.txt line", res)
894 # the DIR reference just points to a URI
895 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
896 get_sub = ((r'<td><a href="%s">sub</a></td>' % sub_url)
897 + r'\s+<td>DIR</td>')
898 self.failUnless(re.search(get_sub, res), res)
899 d.addCallback(_check)
901 # look at a directory which is readonly
902 d.addCallback(lambda res:
903 self.GET(self.public_url + "/reedownlee", followRedirect=True))
905 self.failUnless("(readonly)" in res, res)
906 self.failIf("Upload a file" in res, res)
907 d.addCallback(_check2)
909 # and at a directory that contains a readonly directory
910 d.addCallback(lambda res:
911 self.GET(self.public_url, followRedirect=True))
913 self.failUnless(re.search(r'<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a>'
914 '</td>\s+<td>DIR-RO</td>', res))
915 d.addCallback(_check3)
917 # and an empty directory
918 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
920 self.failUnless("directory is empty" in res, res)
921 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory</legend>.*<input type="submit" value="Create" />', re.I)
922 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
923 d.addCallback(_check4)
927 def test_GET_DIRURL_badtype(self):
928 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
932 self.public_url + "/foo?t=bogus")
935 def test_GET_DIRURL_json(self):
936 d = self.GET(self.public_url + "/foo?t=json")
937 d.addCallback(self.failUnlessIsFooJSON)
941 def test_POST_DIRURL_manifest_no_ophandle(self):
942 d = self.shouldFail2(error.Error,
943 "test_POST_DIRURL_manifest_no_ophandle",
945 "slow operation requires ophandle=",
946 self.POST, self.public_url, t="start-manifest")
    def test_POST_DIRURL_manifest(self):
        # Drive the slow t=start-manifest operation and verify its HTML, text,
        # and JSON output forms.
        # NOTE(review): several source lines are missing from this extract
        # (POST kwargs, inner 'return d', the '_got_json' def and 'got = {}'
        # initializer) -- confirm against version control.
        d = defer.succeed(None)
        def getman(ignored, output):
            d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
            d.addCallback(self.wait_for_operation, "125")
            d.addCallback(self.get_operation_results, "125", output)
        d.addCallback(getman, None)
        def _got_html(manifest):
            self.failUnless("Manifest of SI=" in manifest)
            self.failUnless("<td>sub</td>" in manifest)
            self.failUnless(self._sub_uri in manifest)
            self.failUnless("<td>sub/baz.txt</td>" in manifest)
        d.addCallback(_got_html)
        # both t=status and unadorned GET should be identical
        d.addCallback(lambda res: self.GET("/operations/125"))
        d.addCallback(_got_html)
        d.addCallback(getman, "html")
        d.addCallback(_got_html)
        d.addCallback(getman, "text")
        def _got_text(manifest):
            self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
            self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
        d.addCallback(_got_text)
        d.addCallback(getman, "JSON")
            data = res["manifest"]
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
            self.failUnless((u"sub",u"baz.txt") in got)
            self.failUnless("finished" in res)
            self.failUnless("origin" in res)
            self.failUnless("storage-index" in res)
            self.failUnless("verifycaps" in res)
            self.failUnless("stats" in res)
        d.addCallback(_got_json)
    def test_POST_DIRURL_deepsize_no_ophandle(self):
        # t=start-deep-size is a slow operation and must be given ophandle=.
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepsize_no_ophandle",
                             # NOTE(review): expected-status argument missing in this extract
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-size")
    def test_POST_DIRURL_deepsize(self):
        # t=start-deep-size: verify the JSON and text forms of the result.
        # NOTE(review): lines missing in this extract ('size = ...' in
        # _got_json, the '_got_text' def line, trailing 'return d').
        d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "126")
        d.addCallback(self.get_operation_results, "126", "json")
        def _got_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnless(size > 1000)
        d.addCallback(_got_json)
        d.addCallback(self.get_operation_results, "126", "text")
            mo = re.search(r'^size: (\d+)$', res, re.M)
            self.failUnless(mo, res)
            size = int(mo.group(1))
            # with directories, the size varies.
            self.failUnless(size > 1000)
        d.addCallback(_got_text)
    def test_POST_DIRURL_deepstats_no_ophandle(self):
        # t=start-deep-stats is a slow operation and must be given ophandle=.
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepstats_no_ophandle",
                             # NOTE(review): expected-status argument missing in this extract
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-stats")
    def test_POST_DIRURL_deepstats(self):
        # t=start-deep-stats: check the aggregate statistics over the fixture
        # tree (counts and sizes are fixed by the test setup).
        # NOTE(review): lines missing in this extract (some dict entries, the
        # closing brace, format args, histogram expectation, 'return d').
        d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "127")
        d.addCallback(self.get_operation_results, "127", "json")
        def _got_json(stats):
            expected = {"count-immutable-files": 3,
                        "count-mutable-files": 0,
                        "count-literal-files": 0,
                        "count-directories": 3,
                        "size-immutable-files": 57,
                        "size-literal-files": 0,
                        #"size-directories": 1912, # varies
                        #"largest-directory": 1590,
                        "largest-directory-children": 5,
                        "largest-immutable-file": 19,
            for k,v in expected.iteritems():
                self.failUnlessEqual(stats[k], v,
                                     "stats[%s] was %s, not %s" %
            self.failUnlessEqual(stats["size-files-histogram"],
        d.addCallback(_got_json)
    def test_POST_DIRURL_stream_manifest(self):
        # t=stream-manifest streams newline-separated JSON units; the last
        # unit is the "stats" record, the first is the root directory.
        # NOTE(review): lines missing in this extract (the '_check' def line,
        # 'first = units[0]', trailing 'return d').
        d = self.POST(self.public_url + "/foo/?t=stream-manifest")
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 7)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self._foo_uri)
            self.failUnlessEqual(first["type"], "directory")
            baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
            self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
            self.failIfEqual(baz["storage-index"], None)
            self.failIfEqual(baz["verifycap"], None)
            self.failIfEqual(baz["repaircap"], None)
        d.addCallback(_check)
    def test_GET_DIRURL_uri(self):
        # ?t=uri returns the directory's read-write URI string.
        # NOTE(review): the '_check' def line and 'return d' are missing
        # in this extract.
        d = self.GET(self.public_url + "/foo?t=uri")
            self.failUnlessEqual(res, self._foo_uri)
        d.addCallback(_check)
    def test_GET_DIRURL_readonly_uri(self):
        # ?t=readonly-uri returns the directory's read-only URI string.
        # NOTE(review): the '_check' def line and 'return d' are missing
        # in this extract.
        d = self.GET(self.public_url + "/foo?t=readonly-uri")
            self.failUnlessEqual(res, self._foo_readonly_uri)
        d.addCallback(_check)
    def test_PUT_NEWDIRURL(self):
        # PUT ?t=mkdir creates a new empty child directory.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_PUT_NEWDIRURL_exists(self):
        # PUT ?t=mkdir on an existing directory is a no-op: the child and
        # its contents survive.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_blocked(self):
        # mkdir through a path blocked by an existing file yields 409 Conflict
        # and leaves the tree unchanged.
        d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                             "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                             # NOTE(review): the 'self.PUT,' callable argument
                             # is missing in this extract
                             self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_mkdir_p(self):
        # t=mkdir-p&path=... creates intermediate directories, and repeating
        # the request returns the same URI (idempotent).
        # NOTE(review): lines missing in this extract (the POST issuing the
        # mkdir-p URL, inner 'return d's, trailing 'return d').
        d = defer.succeed(None)
        d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
        d.addCallback(lambda res: self._foo_node.get(u"mkp"))
        def mkdir_p(mkpnode):
            url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
            def made_subsub(ssuri):
                d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
                d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
                d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            d.addCallback(made_subsub)
        d.addCallback(mkdir_p)
    def test_PUT_NEWDIRURL_mkdirs(self):
        # PUT ?t=mkdir with intermediate path elements creates them too:
        # 'newdir' appears under 'subdir', not directly under foo.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
        d.addCallback(lambda res:
                      self._foo_node.get_child_at_path(u"subdir/newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_DELETE_DIRURL(self):
        # DELETE on a directory URL unlinks it from its parent.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.DELETE(self.public_url + "/foo")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self.public_root, u"foo"))
    def test_DELETE_DIRURL_missing(self):
        # DELETE of a nonexistent child 404s and leaves the parent intact.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self.public_root, u"foo"))
    def test_DELETE_DIRURL_missing2(self):
        # DELETE of a nonexistent top-level entry also 404s.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.DELETE(self.public_url + "/missing")
        d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    def dump_root(self):
        # Debugging helper: walk the public root and print each child path.
        # NOTE(review): lines missing in this extract (the visitor's print
        # statement, 'return d').
        w = webish.DirnodeWalkerMixin()
        def visitor(childpath, childnode, metadata):
        d = w.walk(self.public_root, visitor)
    def failUnlessNodeKeysAre(self, node, expected_keys):
        # Assert that the directory node's children are exactly expected_keys
        # (unicode names, order-insensitive).
        # NOTE(review): the 'd = node.list()' line and 'return d' are missing
        # in this extract.
        for k in expected_keys:
            assert isinstance(k, unicode)
        def _check(children):
            self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
        d.addCallback(_check)
    def failUnlessNodeHasChild(self, node, name):
        # Assert that the directory node has a child named `name` (unicode).
        # NOTE(review): the 'd = node.list()' line and 'return d' are missing
        # in this extract.
        assert isinstance(name, unicode)
        def _check(children):
            self.failUnless(name in children)
        d.addCallback(_check)
    def failIfNodeHasChild(self, node, name):
        # Assert that the directory node has NO child named `name` (unicode).
        # NOTE(review): the 'd = node.list()' line and 'return d' are missing
        # in this extract.
        assert isinstance(name, unicode)
        def _check(children):
            self.failIf(name in children)
        d.addCallback(_check)
    def failUnlessChildContentsAre(self, node, name, expected_contents):
        # Download the (immutable) child at `name` and compare its bytes.
        # NOTE(review): trailing 'return d' not shown in this extract.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        d.addCallback(lambda node: node.download_to_data())
        def _check(contents):
            self.failUnlessEqual(contents, expected_contents)
        d.addCallback(_check)
    def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
        # Download the best version of the mutable child at `name` and
        # compare its bytes.
        # NOTE(review): trailing 'return d' not shown in this extract.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
        d.addCallback(lambda node: node.download_best_version())
        def _check(contents):
            self.failUnlessEqual(contents, expected_contents)
        d.addCallback(_check)
    def failUnlessChildURIIs(self, node, name, expected_uri):
        # Assert the child's URI equals expected_uri (whitespace-stripped).
        # NOTE(review): the '_check(child)' def line and 'return d' are
        # missing in this extract.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        d.addCallback(_check)
    def failUnlessURIMatchesChild(self, got_uri, node, name):
        # Assert that a URI returned by the web API matches the child's URI.
        # NOTE(review): the '_check(child)' def line and 'return d' are
        # missing in this extract.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            self.failUnlessEqual(got_uri.strip(), child.get_uri())
        d.addCallback(_check)
1229 def failUnlessCHKURIHasContents(self, got_uri, contents):
1230 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
    def test_POST_upload(self):
        # POST t=upload with a file field adds an immutable child to the dir.
        # NOTE(review): the 'fn = self._foo_node' line and 'return d' are
        # missing in this extract.
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_unicode(self):
        # Upload with a non-ASCII filename; fetch it back via the UTF-8
        # encoded URL.
        # NOTE(review): the 'fn = self._foo_node' line, a format-message
        # argument, and 'return d' are missing in this extract.
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=(filename, self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_unicode_named(self):
        # A name= field overrides the filename supplied in the file part.
        # NOTE(review): the 'name=filename,' kwarg line, 'fn = ...', a
        # format-message argument, and 'return d' are missing in this extract.
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("overridden", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_no_link(self):
        # POST /uri?t=upload (no parent dir) returns an upload-results page
        # containing the new file's URI.
        # NOTE(review): 'return new_uri' and the trailing 'return d' are
        # missing in this extract.
        d = self.POST("/uri", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check_upload_results(page):
            # this should be a page which describes the results of the upload
            # that just finished.
            self.failUnless("Upload Results:" in page)
            self.failUnless("URI:" in page)
            uri_re = re.compile("URI: <tt><span>(.*)</span>")
            mo = uri_re.search(page)
            self.failUnless(mo, page)
            new_uri = mo.group(1)
        d.addCallback(_check_upload_results)
        d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
    def test_POST_upload_no_link_whendone(self):
        # when_done= on an unlinked upload redirects to the given URL.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST("/uri", t="upload", when_done="/",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/")
    def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
        # Invoke `callable` expecting a PageRedirect failure; pass the status
        # code and Location target to `checker`. A plain (non-redirect)
        # result is a test failure.
        # NOTE(review): the '_done(res)' def line, the failure-message
        # arguments, and 'addBoth/return d' are missing in this extract.
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(error.PageRedirect)
                statuscode = res.value.status
                target = res.value.location
                return checker(statuscode, target)
            self.fail("%s: callable was supposed to redirect, not return '%s'"
    def test_POST_upload_no_link_whendone_results(self):
        # when_done=/uri/%(uri)s interpolates the new URI into the redirect
        # target; fetching that target yields the uploaded bytes.
        # NOTE(review): the 'check,' argument line and 'return d' are missing
        # in this extract.
        def check(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.startswith(self.webish_url), target)
            return client.getPage(target, method="GET")
        d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                                 self.POST, "/uri", t="upload",
                                 when_done="/uri/%(uri)s",
                                 file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    def test_POST_upload_no_link_mutable(self):
        # mutable=true on an unlinked upload returns a mutable-file URI; the
        # content must be retrievable via /uri/ and /file/ forms.
        # NOTE(review): lines missing in this extract (the uri.from_string
        # parse, the _check2/_check3/_check4 def lines, 'return d').
        d = self.POST("/uri", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check(new_uri):
            new_uri = new_uri.strip()
            self.new_uri = new_uri
            self.failUnless(IMutableFileURI.providedBy(u))
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(new_uri)
            return n.download_best_version()
        d.addCallback(_check)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/uri/%s" % urllib.quote(self.new_uri))
        d.addCallback(_check2)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/file/%s" % urllib.quote(self.new_uri))
        d.addCallback(_check3)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        d.addCallback(_check4)
    def test_POST_upload_no_link_mutable_toobig(self):
        # SDMF mutable files are limited to one segment; exceeding
        # MUTABLE_SIZELIMIT must yield 413.
        # NOTE(review): the 'self.POST,' and 'file=("new.txt",' lines and
        # 'return d' are missing in this extract.
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_no_link_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             "/uri", t="upload", mutable="true",
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def test_POST_upload_mutable(self):
        # End-to-end mutable-file exercise: create via POST, overwrite via
        # POST and PUT (URI must stay stable), check HTML/JSON directory
        # listings, t=uri/t=readonly-uri, /uri/ access, HEAD headers, and
        # the oversize-overwrite error.
        # NOTE(review): this extract is missing many interior lines ('fn ='
        # bindings, several '_got'/'_check' def headers, kwarg continuation
        # lines, and 'return d') -- confirm against version control before
        # editing logic here.
        # this creates a mutable file
        d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self._mutable_node = newnode
            self._mutable_uri = newnode.get_uri()
        # now upload it again and make sure that the URI doesn't change
        NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
        d.addCallback(lambda res:
                      self.POST(self.public_url + "/foo", t="upload",
                                file=("new.txt", NEWER_CONTENTS)))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got2)
        # upload a second time, using PUT instead of POST
        NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        # finally list the directory, since mutable files are displayed
        # slightly differently
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/",
                               followRedirect=True))
        def _check_page(res):
            # TODO: assert more about the contents
            self.failUnless("SSK" in res)
        d.addCallback(_check_page)
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got3)
        # look at the JSON form of the enclosing directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/?t=json",
                               followRedirect=True))
        def _check_page_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "dirnode")
            children = dict( [(unicode(name),value)
                              in parsed[1]["children"].iteritems()] )
            self.failUnless("new.txt" in children)
            new_json = children["new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
        d.addCallback(_check_page_json)
        # and the JSON form of the file
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=json"))
        def _check_file_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
        d.addCallback(_check_file_json)
        # and look at t=uri and t=readonly-uri
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=uri"))
        d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
        def _check_ro_uri(res):
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(res, ro_uri)
        d.addCallback(_check_ro_uri)
        # make sure we can get to it from /uri/URI
        d.addCallback(lambda res:
                      self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, NEW2_CONTENTS))
        # and that HEAD computes the size correctly
        d.addCallback(lambda res:
                      self.HEAD(self.public_url + "/foo/new.txt",
                                return_response=True))
        def _got_headers((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(NEW2_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got_headers)
        # make sure that size errors are displayed correctly for overwrite
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error,
                                       "test_POST_upload_mutable-toobig",
                                       "413 Request Entity Too Large",
                                       "SDMF is limited to one segment, and 10001 > 10000",
                                       self.public_url + "/foo", t="upload",
                                             "b" * (self.s.MUTABLE_SIZELIMIT+1)),
        d.addErrback(self.dump_error)
    def test_POST_upload_mutable_toobig(self):
        # Oversize mutable upload into a directory also yields 413.
        # NOTE(review): the 'self.POST,' and 'file=...' lines and 'return d'
        # are missing in this extract; the which-string reuses the no_link
        # test's name (presumably a copy/paste leftover -- confirm).
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_no_link_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo",
                             t="upload", mutable="true",
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def dump_error(self, f):
        # if the web server returns an error code (like 400 Bad Request),
        # web.client.getPage puts the HTTP response body into the .response
        # attribute of the exception object that it gives back. It does not
        # appear in the Failure's repr(), so the ERROR that trial displays
        # will be rather terse and unhelpful. addErrback this method to the
        # end of your chain to get more information out of these errors.
        # NOTE(review): a line is missing between the two prints in this
        # extract; the errback also presumably re-returns f -- confirm.
        if f.check(error.Error):
            print "web.error.Error:"
            print f.value.response
    def test_POST_upload_replace(self):
        # Uploading over an existing child replaces it by default.
        # NOTE(review): the 'fn = self._foo_node' line and 'return d' are
        # missing in this extract.
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_ok(self):
        # replace=false is fine when the child does not already exist.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
        d.addCallback(lambda res: self.failUnlessEqual(res,
                                                       self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_queryarg(self):
        # ?replace=false over an existing child fails and leaves the
        # original contents intact.
        # NOTE(review): the expected-status line and 'return d' are missing
        # in this extract.
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldFail, error.Error,
                  "POST_upload_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_no_replace_field(self):
        # replace="false" as a form field behaves like the query argument.
        # NOTE(review): the expected-status line and 'return d' are missing
        # in this extract.
        d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_whendone(self):
        # when_done= redirects after the upload; the child is still created.
        # NOTE(review): the 'fn = self._foo_node' line and 'return d' are
        # missing in this extract.
        d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/THERE")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named(self):
        # name= supplies the child name when the file field has no filename.
        # NOTE(review): the 'fn = self._foo_node' line and 'return d' are
        # missing in this extract.
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="new.txt", file=self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named_badfilename(self):
        # name= containing a slash is rejected and nothing is added.
        # NOTE(review): the expected-status line, the POST callable args,
        # part of the expected-keys list, and 'return d' are missing in
        # this extract.
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_upload_named_badfilename",
                  "name= may not contain a slash",
        # make sure that nothing was added
        d.addCallback(lambda res:
                      self.failUnlessNodeKeysAre(self._foo_node,
                                                 [u"bar.txt", u"blockingfile",
                                                  u"empty", u"n\u00fc.txt",
    def test_POST_FILEURL_check(self):
        # t=check on a healthy file: HTML result, when_done redirect,
        # return_to link, and JSON output.
        # NOTE(review): several def lines (_check, _check3) and the
        # shouldRedirect2 argument lines are missing in this extract, as is
        # 'return d'.
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_FILEURL_check_and_repair(self):
        # t=check&repair=true on a healthy file; also exercises when_done
        # redirect and return_to link.
        # NOTE(review): the _check/_check3 def lines and shouldRedirect2
        # argument lines are missing in this extract, as is 'return d'.
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check", repair="true")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def test_POST_DIRURL_check(self):
        # t=check on a directory: same shape as the file version (HTML,
        # redirect, return_to, JSON).
        # NOTE(review): the _check/_check3 def lines and shouldRedirect2
        # argument lines are missing in this extract, as is 'return d'.
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res, res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_DIRURL_check_and_repair(self):
        # t=check&repair=true on a directory; when_done and return_to paths.
        # NOTE(review): the _check/_check3 def lines and shouldRedirect2
        # argument lines are missing in this extract, as is 'return d'.
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check", repair="true")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def wait_for_operation(self, ignored, ophandle):
        # Poll /operations/<ophandle>?t=status&output=JSON once per second
        # until the operation reports finished; fires with the parsed JSON.
        # NOTE(review): the GET issuing the request, the '_got' def line,
        # and the final 'return data' path are missing in this extract.
        url = "/operations/" + ophandle
        url += "?t=status&output=JSON"
            data = simplejson.loads(res)
            if not data["finished"]:
                d = self.stall(delay=1.0)
                d.addCallback(self.wait_for_operation, ophandle)
    def get_operation_results(self, ignored, ophandle, output=None):
        # Fetch /operations/<ophandle> results; parse as JSON when the
        # requested output form is "json", otherwise return raw text.
        # NOTE(review): the '?t=status' suffix, the GET, the '_got' def line,
        # and the non-JSON return path are missing in this extract.
        url = "/operations/" + ophandle
            url += "&output=" + output
            if output and output.lower() == "json":
                return simplejson.loads(res)
    def test_POST_DIRURL_deepcheck_no_ophandle(self):
        # t=start-deep-check is a slow operation and must be given ophandle=.
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepcheck_no_ophandle",
                             # NOTE(review): expected-status argument missing in this extract
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-check")
    def test_POST_DIRURL_deepcheck(self):
        # Deep-check over the fixture tree (8 objects): redirect to the
        # operations page, JSON and HTML result forms, per-storage-index
        # detail pages, and a 404 for a bogus SI.
        # NOTE(review): trailing 'return d' not shown in this extract.
        def _check_redirect(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.endswith("/operations/123"))
        d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                                 self.POST, self.public_url,
                                 t="start-deep-check", ophandle="123")
        d.addCallback(self.wait_for_operation, "123")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy"], 8)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "123", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy: <span>8</span>" in res)
        d.addCallback(_check_html)
        d.addCallback(lambda res:
                      self.GET("/operations/123/"))
        d.addCallback(_check_html) # should be the same as without the slash
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "one", "404 Not Found",
                                       "No detailed results for SI bogus",
                                       self.GET, "/operations/123/bogus"))
        foo_si = self._foo_node.get_storage_index()
        foo_si_s = base32.b2a(foo_si)
        d.addCallback(lambda res:
                      self.GET("/operations/123/%s?output=JSON" % foo_si_s))
        def _check_foo_json(res):
            data = simplejson.loads(res)
            self.failUnlessEqual(data["storage-index"], foo_si_s)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_foo_json)
    def test_POST_DIRURL_deepcheck_and_repair(self):
        # Deep-check with repair over a healthy tree: all pre/post-repair
        # counters must show 8 healthy objects and zero repairs.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST(self.public_url, t="start-deep-check", repair="true",
                      ophandle="124", output="json", followRedirect=True)
        d.addCallback(self.wait_for_operation, "124")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
            self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
            self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
            self.failUnlessEqual(data["count-repairs-attempted"], 0)
            self.failUnlessEqual(data["count-repairs-successful"], 0)
            self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
            self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
            self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
            self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "124", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
            self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
            self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
            self.failUnless("Repairs Attempted: <span>0</span>" in res)
            self.failUnless("Repairs Successful: <span>0</span>" in res)
            self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
            self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
            self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
            self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
        d.addCallback(_check_html)
    def test_POST_FILEURL_bad_t(self):
        # POST to a file with an unknown t= is a 400 Bad Request.
        # NOTE(review): the 't="bogus"' kwarg line and 'return d' are missing
        # in this extract.
        d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                             "POST to file: bad t=bogus",
                             self.POST, self.public_url + "/foo/bar.txt",
    def test_POST_mkdir(self): # return value?
        # POST t=mkdir&name= creates an empty child directory.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_mkdir_2(self):
        # POST to the child URL with ?t=mkdir also creates the directory.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_mkdirs_2(self):
        # POST ?t=mkdir with intermediate path elements creates them all.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
        d.addCallback(lambda res: self._foo_node.get(u"bardir"))
        d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_mkdir_no_parentdir_noredirect(self):
        # POST /uri?t=mkdir returns a parseable directory URI in the body.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST("/uri?t=mkdir")
        def _after_mkdir(res):
            uri.NewDirectoryURI.init_from_string(res)
        d.addCallback(_after_mkdir)
    def test_POST_mkdir_no_parentdir_redirect(self):
        # redirect_to_result=true makes the mkdir 303-redirect to the new
        # directory's uri/URI:DIR2: page.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.POST("/uri?t=mkdir&redirect_to_result=true")
        d.addBoth(self.shouldRedirect, None, statuscode='303')
        def _check_target(target):
            target = urllib.unquote(target)
            self.failUnless(target.startswith("uri/URI:DIR2:"), target)
        d.addCallback(_check_target)
    def test_POST_noparent_bad(self):
        # Unknown t= on /uri is rejected with 400 and an explanatory message.
        # NOTE(review): trailing 'return d' not shown in this extract.
        d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                                 "/uri accepts only PUT, PUT?t=mkdir, "
                                 "POST?t=upload, and POST?t=mkdir",
                                 self.POST, "/uri?t=bogus")
    def test_welcome_page_mkdir_button(self):
        # Scrape the welcome page's mkdir form and submit it; expect a 303
        # redirect to the new directory.
        # NOTE(review): the initial GET of the welcome page and the
        # 'formt = mo.group(2)' line are missing in this extract, as is
        # 'return d'.
        # Fetch the welcome page.
        def _after_get_welcome_page(res):
            MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create Directory!" />', re.I)
            mo = MKDIR_BUTTON_RE.search(res)
            formaction = mo.group(1)
            formaname = mo.group(3)
            formavalue = mo.group(4)
            return (formaction, formt, formaname, formavalue)
        d.addCallback(_after_get_welcome_page)
        def _after_parse_form(res):
            (formaction, formt, formaname, formavalue) = res
            return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
        d.addCallback(_after_parse_form)
        d.addBoth(self.shouldRedirect, None, statuscode='303')
# POST t=mkdir with name="sub" where /foo/sub already exists: default is to
# replace it with a fresh empty directory.
1902 def test_POST_mkdir_replace(self): # return value?
1903 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1904 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1905 d.addCallback(self.failUnlessNodeKeysAre, [])
# Same, but replace=false as a query arg: must fail and leave the existing
# 'sub' (still containing baz.txt) untouched.
1908 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1909 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1910 d.addBoth(self.shouldFail, error.Error,
1911 "POST_mkdir_no_replace_queryarg",
1913 "There was already a child by that name, and you asked me "
1914 "to not replace it")
1915 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1916 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Same, but replace=false supplied as a form field instead of a query arg.
# NOTE(review): the field argument line (presumably replace="false") is not
# visible in this sampled view.
1919 def test_POST_mkdir_no_replace_field(self): # return value?
1920 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1922 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1924 "There was already a child by that name, and you asked me "
1925 "to not replace it")
1926 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1927 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# when_done as a form field: mkdir succeeds and we are redirected to /THERE.
1930 def test_POST_mkdir_whendone_field(self):
1931 d = self.POST(self.public_url + "/foo",
1932 t="mkdir", name="newdir", when_done="/THERE")
1933 d.addBoth(self.shouldRedirect, "/THERE")
1934 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1935 d.addCallback(self.failUnlessNodeKeysAre, [])
# when_done as a query arg: same behavior as the field form.
1938 def test_POST_mkdir_whendone_queryarg(self):
1939 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1940 t="mkdir", name="newdir")
1941 d.addBoth(self.shouldRedirect, "/THERE")
1942 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1943 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST to a directory with an unknown t= value must yield 400 Bad Request.
1946 def test_POST_bad_t(self):
1947 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1948 "POST to a directory with bad t=BOGUS",
1949 self.POST, self.public_url + "/foo", t="BOGUS")
# t=set_children: atomically add three filenode children via a JSON request
# body, then verify each child's URI.
# NOTE(review): sampled view — the opening of the JSON `reqbody` string and
# several of its lines (ro_uri, size, metadata braces) are not visible here.
1952 def test_POST_set_children(self):
1953 contents9, n9, newuri9 = self.makefile(9)
1954 contents10, n10, newuri10 = self.makefile(10)
1955 contents11, n11, newuri11 = self.makefile(11)
1958 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1961 "ctime": 1002777696.7564139,
1962 "mtime": 1002777696.7564139
1965 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1968 "ctime": 1002777696.7564139,
1969 "mtime": 1002777696.7564139
1972 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1975 "ctime": 1002777696.7564139,
1976 "mtime": 1002777696.7564139
1979 }""" % (newuri9, newuri10, newuri11)
1981 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
1983 d = client.getPage(url, method="POST", postdata=reqbody)
1985 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
1986 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
1987 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
1989 d.addCallback(_then)
1990 d.addErrback(self.dump_error)
# POST t=uri: attach an existing file URI as a new child 'new.txt' and verify
# both the link and the content.
1993 def test_POST_put_uri(self):
1994 contents, n, newuri = self.makefile(8)
1995 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
1996 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
1997 d.addCallback(lambda res:
1998 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# t=uri over an existing child name: default is to replace bar.txt.
2002 def test_POST_put_uri_replace(self):
2003 contents, n, newuri = self.makefile(8)
2004 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2005 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2006 d.addCallback(lambda res:
2007 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# t=uri with replace=false (query arg): must fail and leave the original
# bar.txt intact.
2011 def test_POST_put_uri_no_replace_queryarg(self):
2012 contents, n, newuri = self.makefile(8)
2013 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2014 name="bar.txt", uri=newuri)
2015 d.addBoth(self.shouldFail, error.Error,
2016 "POST_put_uri_no_replace_queryarg",
2018 "There was already a child by that name, and you asked me "
2019 "to not replace it")
2020 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2021 d.addCallback(self.failUnlessIsBarDotTxt)
# t=uri with replace=false as a form field: same refusal behavior.
2024 def test_POST_put_uri_no_replace_field(self):
2025 contents, n, newuri = self.makefile(8)
2026 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2027 name="bar.txt", uri=newuri)
2028 d.addBoth(self.shouldFail, error.Error,
2029 "POST_put_uri_no_replace_field",
2031 "There was already a child by that name, and you asked me "
2032 "to not replace it")
2033 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2034 d.addCallback(self.failUnlessIsBarDotTxt)
# t=delete: remove bar.txt from /foo and confirm it no longer appears in the
# directory listing.
2037 def test_POST_delete(self):
2038 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2039 d.addCallback(lambda res: self._foo_node.list())
2040 def _check(children):
2041 self.failIf(u"bar.txt" in children)
2042 d.addCallback(_check)
# t=rename: bar.txt -> wibble.txt; old name gone, new name present, contents
# and JSON metadata preserved.
2045 def test_POST_rename_file(self):
2046 d = self.POST(self.public_url + "/foo", t="rename",
2047 from_name="bar.txt", to_name='wibble.txt')
2048 d.addCallback(lambda res:
2049 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2050 d.addCallback(lambda res:
2051 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2052 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2053 d.addCallback(self.failUnlessIsBarDotTxt)
2054 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2055 d.addCallback(self.failUnlessIsBarJSON)
# Renaming a file to its own name is a no-op: child still present, unchanged.
2058 def test_POST_rename_file_redundant(self):
2059 d = self.POST(self.public_url + "/foo", t="rename",
2060 from_name="bar.txt", to_name='bar.txt')
2061 d.addCallback(lambda res:
2062 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2063 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2064 d.addCallback(self.failUnlessIsBarDotTxt)
2065 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2066 d.addCallback(self.failUnlessIsBarJSON)
# Rename bar.txt onto the existing 'empty' directory: default replace
# semantics clobber the directory with the file.
2069 def test_POST_rename_file_replace(self):
2070 # rename a file and replace a directory with it
2071 d = self.POST(self.public_url + "/foo", t="rename",
2072 from_name="bar.txt", to_name='empty')
2073 d.addCallback(lambda res:
2074 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2075 d.addCallback(lambda res:
2076 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2077 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2078 d.addCallback(self.failUnlessIsBarDotTxt)
2079 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2080 d.addCallback(self.failUnlessIsBarJSON)
# Same rename-over-directory, but replace=false (query arg) must refuse and
# leave 'empty' as an (empty) directory.
2083 def test_POST_rename_file_no_replace_queryarg(self):
2084 # rename a file and replace a directory with it
2085 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2086 from_name="bar.txt", to_name='empty')
2087 d.addBoth(self.shouldFail, error.Error,
2088 "POST_rename_file_no_replace_queryarg",
2090 "There was already a child by that name, and you asked me "
2091 "to not replace it")
2092 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2093 d.addCallback(self.failUnlessIsEmptyJSON)
# Same refusal with replace=false supplied as a form field.
2096 def test_POST_rename_file_no_replace_field(self):
2097 # rename a file and replace a directory with it
2098 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2099 from_name="bar.txt", to_name='empty')
2100 d.addBoth(self.shouldFail, error.Error,
2101 "POST_rename_file_no_replace_field",
2103 "There was already a child by that name, and you asked me "
2104 "to not replace it")
2105 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2106 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json rendering of an empty dirnode:
    a ["dirnode", {...}] pair whose "children" map has no entries."""
    parsed = simplejson.loads(res)
    nodetype = parsed[0]
    info = parsed[1]
    self.failUnlessEqual(nodetype, "dirnode", parsed)
    self.failUnlessEqual(len(info["children"]), 0)
# to_name containing a slash is rejected; the original child must survive.
# NOTE(review): sampled view — the callable arguments to shouldFail (the
# self.POST invocation) around line 2121 are not visible here.
2114 def test_POST_rename_file_slash_fail(self):
2115 d = self.POST(self.public_url + "/foo", t="rename",
2116 from_name="bar.txt", to_name='kirk/spock.txt')
2117 d.addBoth(self.shouldFail, error.Error,
2118 "test_POST_rename_file_slash_fail",
2120 "to_name= may not contain a slash",
2122 d.addCallback(lambda res:
2123 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# Directories can be renamed too: foo -> plunk at the public root, and the
# renamed node still serves foo's JSON.
2126 def test_POST_rename_dir(self):
2127 d = self.POST(self.public_url, t="rename",
2128 from_name="foo", to_name='plunk')
2129 d.addCallback(lambda res:
2130 self.failIfNodeHasChild(self.public_root, u"foo"))
2131 d.addCallback(lambda res:
2132 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2133 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2134 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """ If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished with
    that HTTP status code."""
    # A successful page fetch means we were NOT redirected: that is a
    # test failure here.
    if not isinstance(res, failure.Failure):
        if target is None:
            to_where = "somewhere"
        else:
            to_where = "to " + target
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    # Re-raise anything that is not the redirect we expect.
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        expected_location = self.webish_url + target
        self.failUnlessEqual(res.value.location, expected_location,
                             "%s: wrong target" % which)
    return res.value.location
# GET /uri?uri=... must redirect to /uri/$URI, forwarding filename= and t=
# query args; following the redirect serves the file/dir itself.
# NOTE(review): sampled view — the initial `d = self.GET(base)` line (2161)
# is not visible here.
2157 def test_GET_URI_form(self):
2158 base = "/uri?uri=%s" % self._bar_txt_uri
2159 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2160 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2162 d.addBoth(self.shouldRedirect, targetbase)
2163 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2164 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2165 d.addCallback(lambda res: self.GET(base+"&t=json"))
2166 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2167 d.addCallback(self.log, "about to get file by uri")
2168 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2169 d.addCallback(self.failUnlessIsBarDotTxt)
2170 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2171 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2172 followRedirect=True))
2173 d.addCallback(self.failUnlessIsFooJSON)
2174 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= argument is a 400 Bad Request.
2178 def test_GET_URI_form_bad(self):
2179 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2180 "400 Bad Request", "GET /uri requires uri=",
# t=rename-form: the HTML form must carry when_done="." and a from_name
# field pre-populated with the file being renamed.
2184 def test_GET_rename_form(self):
2185 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2186 followRedirect=True)
2188 self.failUnless('name="when_done" value="."' in res, res)
2189 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2190 d.addCallback(_check)
# Passthrough logger for Deferred chains: logs msg, returns res unchanged.
2193 def log(self, res, msg):
2194 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/$FILECAP serves the file body; filename= and save=true query args
# do not change the returned bytes.
2198 def test_GET_URI_URL(self):
2199 base = "/uri/%s" % self._bar_txt_uri
2201 d.addCallback(self.failUnlessIsBarDotTxt)
2202 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2203 d.addCallback(self.failUnlessIsBarDotTxt)
2204 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2205 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/$DIRCAP?t=json serves the directory's JSON rendering.
2208 def test_GET_URI_URL_dir(self):
2209 base = "/uri/%s?t=json" % self._foo_uri
2211 d.addCallback(self.failUnlessIsFooJSON)
# A cap with no retrievable shares yields HTTP 410 GONE mentioning
# NotEnoughSharesError.
2214 def test_GET_URI_URL_missing(self):
2215 base = "/uri/%s" % self._bad_file_uri
2216 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2217 http.GONE, None, "NotEnoughSharesError",
2219 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2220 # here? we must arrange for a download to fail after target.open()
2221 # has been called, and then inspect the response to see that it is
2222 # shorter than we expected.
# PUT dir?t=uri: replace the /foo link with a freshly created empty dirnode;
# the response echoes the new URI and the root's child link is updated.
2225 def test_PUT_DIRURL_uri(self):
2226 d = self.s.create_empty_dirnode()
2228 new_uri = dn.get_uri()
2229 # replace /foo with a new (empty) directory
2230 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2231 d.addCallback(lambda res:
2232 self.failUnlessEqual(res.strip(), new_uri))
2233 d.addCallback(lambda res:
2234 self.failUnlessChildURIIs(self.public_root,
2238 d.addCallback(_made_dir)
# Same, but replace=false: expect 409 Conflict and the original /foo link
# must be preserved.
2241 def test_PUT_DIRURL_uri_noreplace(self):
2242 d = self.s.create_empty_dirnode()
2244 new_uri = dn.get_uri()
2245 # replace /foo with a new (empty) directory, but ask that
2246 # replace=false, so it should fail
2247 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2248 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2250 self.public_url + "/foo?t=uri&replace=false",
2252 d.addCallback(lambda res:
2253 self.failUnlessChildURIIs(self.public_root,
2257 d.addCallback(_made_dir)
# PUT dir with a bogus t= value: 400 Bad Request, /foo untouched.
2260 def test_PUT_DIRURL_bad_t(self):
2261 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2262 "400 Bad Request", "PUT to a directory",
2263 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2264 d.addCallback(lambda res:
2265 self.failUnlessChildURIIs(self.public_root,
# PUT file?t=uri: attach an existing file cap as a new child new.txt; the
# response echoes the URI and the contents resolve correctly.
2270 def test_PUT_NEWFILEURL_uri(self):
2271 contents, n, new_uri = self.makefile(8)
2272 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2273 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2274 d.addCallback(lambda res:
2275 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# PUT t=uri over an existing child: default replace semantics succeed.
2279 def test_PUT_NEWFILEURL_uri_replace(self):
2280 contents, n, new_uri = self.makefile(8)
2281 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2282 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2283 d.addCallback(lambda res:
2284 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# PUT t=uri with replace=false over an existing child must refuse.
2288 def test_PUT_NEWFILEURL_uri_no_replace(self):
2289 contents, n, new_uri = self.makefile(8)
2290 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2291 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2293 "There was already a child by that name, and you asked me "
2294 "to not replace it")
# PUT /uri with raw bytes: uploads an immutable file, returns its cap; the
# cap must round-trip through GET /uri/$CAP.
2297 def test_PUT_NEWFILE_URI(self):
2298 file_contents = "New file contents here\n"
2299 d = self.PUT("/uri", file_contents)
2301 assert isinstance(uri, str), uri
2302 self.failUnless(uri in FakeCHKFileNode.all_contents)
2303 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2305 return self.GET("/uri/%s" % uri)
2306 d.addCallback(_check)
2308 self.failUnlessEqual(res, file_contents)
2309 d.addCallback(_check2)
# PUT /uri with an unknown t= value is rejected with the canonical message.
2312 def test_PUT_NEWFILE_URI_only_PUT(self):
2313 d = self.PUT("/uri?t=bogus", "")
2314 d.addBoth(self.shouldFail, error.Error,
2315 "PUT_NEWFILE_URI_only_PUT",
2317 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true: creates a mutable file; the returned cap must be an
# IMutableFileURI whose storage index is known to the fake node class, and
# downloading the best version returns the original bytes.
2320 def test_PUT_NEWFILE_URI_mutable(self):
2321 file_contents = "New file contents here\n"
2322 d = self.PUT("/uri?mutable=true", file_contents)
2323 def _check_mutable(uri):
2326 self.failUnless(IMutableFileURI.providedBy(u))
2327 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2328 n = self.s.create_node_from_uri(uri)
2329 return n.download_best_version()
2330 d.addCallback(_check_mutable)
2331 def _check2_mutable(data):
2332 self.failUnlessEqual(data, file_contents)
2333 d.addCallback(_check2_mutable)
# NOTE(review): sampled view — the def line for the following _check helper
# (part of an adjacent PUT test) is not visible here.
2337 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2338 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2340 return self.GET("/uri/%s" % uri)
2341 d.addCallback(_check)
2343 self.failUnlessEqual(res, file_contents)
2344 d.addCallback(_check2)
# PUT /uri?t=mkdir: creates a new (empty) directory; the returned cap must
# resolve to a node with no children and render as empty JSON.
2347 def test_PUT_mkdir(self):
2348 d = self.PUT("/uri?t=mkdir", "")
2350 n = self.s.create_node_from_uri(uri.strip())
2351 d2 = self.failUnlessNodeKeysAre(n, [])
2352 d2.addCallback(lambda res:
2353 self.GET("/uri/%s?t=json" % uri))
2355 d.addCallback(_check)
2356 d.addCallback(self.failUnlessIsEmptyJSON)
# POST t=check on a child: just exercises the checker path; results are not
# asserted because the fake filenodes return None.
2359 def test_POST_check(self):
2360 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2362 # this returns a string form of the results, which are probably
2363 # None since we're using fake filenodes.
2364 # TODO: verify that the check actually happened, by changing
2365 # FakeCHKFileNode to count how many times .check() has been
2368 d.addCallback(_done)
# An unknown HTTP verb on a file URL must yield 501 Not Implemented.
2371 def test_bad_method(self):
2372 url = self.webish_url + self.public_url + "/foo/bar.txt"
2373 d = self.shouldHTTPError("test_bad_method",
2374 501, "Not Implemented",
2375 "I don't know how to treat a BOGUS request.",
2376 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri root is likewise 501 Not Implemented.
2379 def test_short_url(self):
2380 url = self.webish_url + "/uri"
2381 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2382 "I don't know how to treat a DELETE request.",
2383 client.getPage, url, method="DELETE")
# Querying a nonexistent operation handle returns 404 with an
# "unknown/expired handle" message.
2386 def test_ophandle_bad(self):
2387 url = self.webish_url + "/operations/bogus?t=status"
2388 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2389 "unknown/expired handle 'bogus'",
2390 client.getPage, url)
# Start a manifest operation under ophandle=128, then t=cancel it: the
# monitor must report cancelled and the handle must be forgotten (404 on a
# later status query).
2393 def test_ophandle_cancel(self):
2394 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2395 followRedirect=True)
2396 d.addCallback(lambda ignored:
2397 self.GET("/operations/128?t=status&output=JSON"))
2399 data = simplejson.loads(res)
2400 self.failUnless("finished" in data, res)
2401 monitor = self.ws.root.child_operations.handles["128"][0]
2402 d = self.POST("/operations/128?t=cancel&output=JSON")
2404 data = simplejson.loads(res)
2405 self.failUnless("finished" in data, res)
2406 # t=cancel causes the handle to be forgotten
2407 self.failUnless(monitor.is_cancelled())
2408 d.addCallback(_check2)
2410 d.addCallback(_check1)
2411 d.addCallback(lambda ignored:
2412 self.shouldHTTPError("test_ophandle_cancel",
2413 404, "404 Not Found",
2414 "unknown/expired handle '128'",
2416 "/operations/128?t=status&output=JSON"))
# retain-for=0 on the status query expires ophandle=129 almost immediately;
# after a short stall the handle must be gone (404).
2419 def test_ophandle_retainfor(self):
2420 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2421 followRedirect=True)
2422 d.addCallback(lambda ignored:
2423 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2425 data = simplejson.loads(res)
2426 self.failUnless("finished" in data, res)
2427 d.addCallback(_check1)
2428 # the retain-for=0 will cause the handle to be expired very soon
2429 d.addCallback(self.stall, 2.0)
2430 d.addCallback(lambda ignored:
2431 self.shouldHTTPError("test_ophandle_retainfor",
2432 404, "404 Not Found",
2433 "unknown/expired handle '129'",
2435 "/operations/129?t=status&output=JSON"))
# release-after-complete=true on a finished operation's status query drops
# the handle, so the next status query 404s.
2438 def test_ophandle_release_after_complete(self):
2439 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2440 followRedirect=True)
2441 d.addCallback(self.wait_for_operation, "130")
2442 d.addCallback(lambda ignored:
2443 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2444 # the release-after-complete=true will cause the handle to be expired
2445 d.addCallback(lambda ignored:
2446 self.shouldHTTPError("test_ophandle_release_after_complete",
2447 404, "404 Not Found",
2448 "unknown/expired handle '130'",
2450 "/operations/130?t=status&output=JSON"))
# POST /report_incident: the acknowledgement page thanks the reporter.
2453 def test_incident(self):
2454 d = self.POST("/report_incident", details="eek")
2456 self.failUnless("Thank you for your report!" in res, res)
2457 d.addCallback(_done)
# /static/ serves files out of the node's static directory; write a file on
# disk and fetch it back over HTTP.
# NOTE(review): sampled view — the f.write("hello")/f.close() lines are not
# visible here.
2460 def test_static(self):
2461 webdir = os.path.join(self.staticdir, "subdir")
2462 fileutil.make_dirs(webdir)
2463 f = open(os.path.join(webdir, "hello.txt"), "wb")
2467 d = self.GET("/static/subdir/hello.txt")
2469 self.failUnlessEqual(res, "hello")
2470 d.addCallback(_check)
# Unit tests for the pure formatting helpers in allmydata.web.common and
# allmydata.web.status (no webserver needed).
2474 class Util(unittest.TestCase):
# abbreviate_time: None -> "", and second/millisecond/microsecond scaling.
2475 def test_abbreviate_time(self):
2476 self.failUnlessEqual(common.abbreviate_time(None), "")
2477 self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
2478 self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
2479 self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
2480 self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
# abbreviate_rate: bytes-per-second with MBps/kBps/Bps suffixes.
2482 def test_abbreviate_rate(self):
2483 self.failUnlessEqual(common.abbreviate_rate(None), "")
2484 self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
2485 self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
2486 self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
# abbreviate_size: decimal (power-of-1000) GB/MB/kB/B suffixes.
2488 def test_abbreviate_size(self):
2489 self.failUnlessEqual(common.abbreviate_size(None), "")
2490 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
2491 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
2492 self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
2493 self.failUnlessEqual(common.abbreviate_size(123), "123B")
# status.plural: "" for exactly one item, "s" otherwise (counts and lists).
# NOTE(review): sampled view — the nested `def convert(s):` and
# `def convert2(s):` lines are not visible here.
2495 def test_plural(self):
2497 return "%d second%s" % (s, status.plural(s))
2498 self.failUnlessEqual(convert(0), "0 seconds")
2499 self.failUnlessEqual(convert(1), "1 second")
2500 self.failUnlessEqual(convert(2), "2 seconds")
2502 return "has share%s: %s" % (status.plural(s), ",".join(s))
2503 self.failUnlessEqual(convert2([]), "has shares: ")
2504 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2505 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2508 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """Issue a POST against the stashed file URL for *which*, appending
    *args* as the query string; returns the GET/POST Deferred. The *ign*
    parameter lets this be used directly as a Deferred callback."""
    target = "%s?%s" % (self.fileurls[which], args)
    return self.GET(target, method="POST", clientnum=clientnum)
# End-to-end t=check over a real (no_network) grid: upload one healthy file,
# one with a single share deleted ("sick"), one with 9 of 10 deleted
# ("dead"), a corrupted mutable file, and a literal file; then verify the
# HTML and JSON checker output for each.
2515 def test_filecheck(self):
2516 self.basedir = "web/Grid/filecheck"
2518 c0 = self.g.clients[0]
2521 d = c0.upload(upload.Data(DATA, convergence=""))
2522 def _stash_uri(ur, which):
2523 self.uris[which] = ur.uri
2524 d.addCallback(_stash_uri, "good")
2525 d.addCallback(lambda ign:
2526 c0.upload(upload.Data(DATA+"1", convergence="")))
2527 d.addCallback(_stash_uri, "sick")
2528 d.addCallback(lambda ign:
2529 c0.upload(upload.Data(DATA+"2", convergence="")))
2530 d.addCallback(_stash_uri, "dead")
2531 def _stash_mutable_uri(n, which):
2532 self.uris[which] = n.get_uri()
2533 assert isinstance(self.uris[which], str)
2534 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2535 d.addCallback(_stash_mutable_uri, "corrupt")
2536 d.addCallback(lambda ign:
2537 c0.upload(upload.Data("literal", convergence="")))
2538 d.addCallback(_stash_uri, "small")
# Build the uri/<cap> URL for each stashed cap.
2540 def _compute_fileurls(ignored):
2542 for which in self.uris:
2543 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2544 d.addCallback(_compute_fileurls)
# Damage the shares: delete one "sick" share, 9 of 10 "dead" shares, and
# corrupt one share of the mutable file via the debug CLI.
2546 def _clobber_shares(ignored):
2547 good_shares = self.find_shares(self.uris["good"])
2548 self.failUnlessEqual(len(good_shares), 10)
2549 sick_shares = self.find_shares(self.uris["sick"])
2550 os.unlink(sick_shares[0][2])
2551 dead_shares = self.find_shares(self.uris["dead"])
2552 for i in range(1, 10):
2553 os.unlink(dead_shares[i][2])
2554 c_shares = self.find_shares(self.uris["corrupt"])
2555 cso = CorruptShareOptions()
2556 cso.stdout = StringIO()
2557 cso.parseOptions([c_shares[0][2]])
2559 d.addCallback(_clobber_shares)
2561 d.addCallback(self.CHECK, "good", "t=check")
2562 def _got_html_good(res):
2563 self.failUnless("Healthy" in res, res)
2564 self.failIf("Not Healthy" in res, res)
2565 d.addCallback(_got_html_good)
2566 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2567 def _got_html_good_return_to(res):
2568 self.failUnless("Healthy" in res, res)
2569 self.failIf("Not Healthy" in res, res)
2570 self.failUnless('<a href="somewhere">Return to parent directory'
2572 d.addCallback(_got_html_good_return_to)
2573 d.addCallback(self.CHECK, "good", "t=check&output=json")
2574 def _got_json_good(res):
2575 r = simplejson.loads(res)
2576 self.failUnlessEqual(r["summary"], "Healthy")
2577 self.failUnless(r["results"]["healthy"])
2578 self.failIf(r["results"]["needs-rebalancing"])
2579 self.failUnless(r["results"]["recoverable"])
2580 d.addCallback(_got_json_good)
# Literal files carry their data in the cap itself, so they are always
# healthy and have an empty storage index.
2582 d.addCallback(self.CHECK, "small", "t=check")
2583 def _got_html_small(res):
2584 self.failUnless("Literal files are always healthy" in res, res)
2585 self.failIf("Not Healthy" in res, res)
2586 d.addCallback(_got_html_small)
2587 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2588 def _got_html_small_return_to(res):
2589 self.failUnless("Literal files are always healthy" in res, res)
2590 self.failIf("Not Healthy" in res, res)
2591 self.failUnless('<a href="somewhere">Return to parent directory'
2593 d.addCallback(_got_html_small_return_to)
2594 d.addCallback(self.CHECK, "small", "t=check&output=json")
2595 def _got_json_small(res):
2596 r = simplejson.loads(res)
2597 self.failUnlessEqual(r["storage-index"], "")
2598 self.failUnless(r["results"]["healthy"])
2599 d.addCallback(_got_json_small)
# "sick" (one share deleted): unhealthy but still recoverable.
2601 d.addCallback(self.CHECK, "sick", "t=check")
2602 def _got_html_sick(res):
2603 self.failUnless("Not Healthy" in res, res)
2604 d.addCallback(_got_html_sick)
2605 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2606 def _got_json_sick(res):
2607 r = simplejson.loads(res)
2608 self.failUnlessEqual(r["summary"],
2609 "Not Healthy: 9 shares (enc 3-of-10)")
2610 self.failIf(r["results"]["healthy"])
2611 self.failIf(r["results"]["needs-rebalancing"])
2612 self.failUnless(r["results"]["recoverable"])
2613 d.addCallback(_got_json_sick)
# "dead" (only 1 of 10 shares left, k=3): unrecoverable.
2615 d.addCallback(self.CHECK, "dead", "t=check")
2616 def _got_html_dead(res):
2617 self.failUnless("Not Healthy" in res, res)
2618 d.addCallback(_got_html_dead)
2619 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2620 def _got_json_dead(res):
2621 r = simplejson.loads(res)
2622 self.failUnlessEqual(r["summary"],
2623 "Not Healthy: 1 shares (enc 3-of-10)")
2624 self.failIf(r["results"]["healthy"])
2625 self.failIf(r["results"]["needs-rebalancing"])
2626 self.failIf(r["results"]["recoverable"])
2627 d.addCallback(_got_json_dead)
# "corrupt" needs verify=true to detect the flipped bits: 9 good shares,
# 1 corrupt share, still recoverable.
2629 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2630 def _got_html_corrupt(res):
2631 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2632 d.addCallback(_got_html_corrupt)
2633 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2634 def _got_json_corrupt(res):
2635 r = simplejson.loads(res)
2636 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2638 self.failIf(r["results"]["healthy"])
2639 self.failUnless(r["results"]["recoverable"])
2640 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2641 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2642 d.addCallback(_got_json_corrupt)
2644 d.addErrback(self.explain_web_error)
# t=check&repair=true, HTML output: a healthy file needs no repair, a sick
# one is repaired successfully, and a corrupted mutable file is repaired
# after verify=true detects the damage.
2647 def test_repair_html(self):
2648 self.basedir = "web/Grid/repair_html"
2650 c0 = self.g.clients[0]
2653 d = c0.upload(upload.Data(DATA, convergence=""))
2654 def _stash_uri(ur, which):
2655 self.uris[which] = ur.uri
2656 d.addCallback(_stash_uri, "good")
2657 d.addCallback(lambda ign:
2658 c0.upload(upload.Data(DATA+"1", convergence="")))
2659 d.addCallback(_stash_uri, "sick")
2660 d.addCallback(lambda ign:
2661 c0.upload(upload.Data(DATA+"2", convergence="")))
2662 d.addCallback(_stash_uri, "dead")
2663 def _stash_mutable_uri(n, which):
2664 self.uris[which] = n.get_uri()
2665 assert isinstance(self.uris[which], str)
2666 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2667 d.addCallback(_stash_mutable_uri, "corrupt")
2669 def _compute_fileurls(ignored):
2671 for which in self.uris:
2672 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2673 d.addCallback(_compute_fileurls)
# Same damage pattern as test_filecheck: one sick share gone, 9 dead shares
# gone, one corrupted mutable share.
2675 def _clobber_shares(ignored):
2676 good_shares = self.find_shares(self.uris["good"])
2677 self.failUnlessEqual(len(good_shares), 10)
2678 sick_shares = self.find_shares(self.uris["sick"])
2679 os.unlink(sick_shares[0][2])
2680 dead_shares = self.find_shares(self.uris["dead"])
2681 for i in range(1, 10):
2682 os.unlink(dead_shares[i][2])
2683 c_shares = self.find_shares(self.uris["corrupt"])
2684 cso = CorruptShareOptions()
2685 cso.stdout = StringIO()
2686 cso.parseOptions([c_shares[0][2]])
2688 d.addCallback(_clobber_shares)
2690 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2691 def _got_html_good(res):
2692 self.failUnless("Healthy" in res, res)
2693 self.failIf("Not Healthy" in res, res)
2694 self.failUnless("No repair necessary" in res, res)
2695 d.addCallback(_got_html_good)
2697 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2698 def _got_html_sick(res):
2699 self.failUnless("Healthy : healthy" in res, res)
2700 self.failIf("Not Healthy" in res, res)
2701 self.failUnless("Repair successful" in res, res)
2702 d.addCallback(_got_html_sick)
2704 # repair of a dead file will fail, of course, but it isn't yet
2705 # clear how this should be reported. Right now it shows up as
2708 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2709 #def _got_html_dead(res):
2711 # self.failUnless("Healthy : healthy" in res, res)
2712 # self.failIf("Not Healthy" in res, res)
2713 # self.failUnless("No repair necessary" in res, res)
2714 #d.addCallback(_got_html_dead)
2716 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2717 def _got_html_corrupt(res):
2718 self.failUnless("Healthy : Healthy" in res, res)
2719 self.failIf("Not Healthy" in res, res)
2720 self.failUnless("Repair successful" in res, res)
2721 d.addCallback(_got_html_corrupt)
2723 d.addErrback(self.explain_web_error)
# t=check&repair=true with output=json: pre-repair results show the missing
# share, repair is attempted and succeeds, post-repair results are healthy.
2726 def test_repair_json(self):
2727 self.basedir = "web/Grid/repair_json"
2729 c0 = self.g.clients[0]
2732 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2733 def _stash_uri(ur, which):
2734 self.uris[which] = ur.uri
2735 d.addCallback(_stash_uri, "sick")
2737 def _compute_fileurls(ignored):
2739 for which in self.uris:
2740 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2741 d.addCallback(_compute_fileurls)
# Delete one share so the file is unhealthy (9 of 10 remain, k=3).
2743 def _clobber_shares(ignored):
2744 sick_shares = self.find_shares(self.uris["sick"])
2745 os.unlink(sick_shares[0][2])
2746 d.addCallback(_clobber_shares)
2748 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2749 def _got_json_sick(res):
2750 r = simplejson.loads(res)
2751 self.failUnlessEqual(r["repair-attempted"], True)
2752 self.failUnlessEqual(r["repair-successful"], True)
2753 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2754 "Not Healthy: 9 shares (enc 3-of-10)")
2755 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2756 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2757 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2758 d.addCallback(_got_json_sick)
2760 d.addErrback(self.explain_web_error)
def test_deep_check(self):
    """Exercise the webapi's streaming deep-check over a small tree.

    Phase 1: build root/ containing an immutable file ("good"), a literal
    file ("small"), and a "sick" file missing two shares, then verify the
    JSON units streamed back by t=stream-deep-check (one unit per line,
    parent-first, with a trailing "stats" unit).

    Phase 2: add root/subdir/grandchild, make subdir unrecoverable, and
    verify that both t=stream-manifest and t=stream-deep-check report the
    failure by emitting an "ERROR:" line followed by a traceback instead
    of more JSON -- the agreed way for a streaming-JSON API to signal a
    fatal error mid-stream.

    NOTE(review): the embedded original line numbers in this listing are
    discontinuous -- several lines are elided, so some statements below
    are syntactically incomplete (unbalanced parens, missing helper
    headers). Do not treat this transcription as runnable as-is.
    """
    self.basedir = "web/Grid/deep_check"
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    def _stash_root_and_create_file(n):
        # record the root dirnode's webapi URL, then add immutable "good"
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        # remember each child's cap so later assertions can refer to it
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    # NOTE(review): the next two add_file calls are truncated in this
    # listing (their closing arguments/parens were elided).
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")

    def _clobber_shares(ignored):
        # make "sick" unhealthy (but still recoverable: 8 of 10 shares left)
        self.delete_shares_numbered(self.uris["sick"], [0,1])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    # NOTE(review): the enclosing "def _done(res):" header for the block
    # below appears to have been elided from this listing.
        # each line of the streamed response is an independent JSON unit
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        # root, good, small, sick, plus the trailing stats unit
        self.failUnlessEqual(len(units), 4+1)
        # should be parent-first
        self.failUnlessEqual(u0["path"], [])
        self.failUnlessEqual(u0["type"], "directory")
        self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
        u0cr = u0["check-results"]
        self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
        ugood = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"good"]][0]
        self.failUnlessEqual(ugood["cap"], self.uris["good"])
        ugoodcr = ugood["check-results"]
        self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
        self.failUnlessEqual(stats["type"], "stats")
        self.failUnlessEqual(s["count-immutable-files"], 2)
        self.failUnlessEqual(s["count-literal-files"], 1)
        self.failUnlessEqual(s["count-directories"], 1)
    d.addCallback(_done)

    # now add root/subdir and root/subdir/grandchild, then make subdir
    # unrecoverable, then see what happens

    d.addCallback(lambda ign:
                  self.rootnode.create_empty_directory(u"subdir"))
    d.addCallback(_stash_uri, "subdir")
    d.addCallback(lambda subdir_node:
                  subdir_node.add_file(u"grandchild",
                                       upload.Data(DATA+"2",
    d.addCallback(_stash_uri, "grandchild")
    d.addCallback(lambda ign:
                  self.delete_shares_numbered(self.uris["subdir"],
    # root/subdir [unrecoverable]
    # root/subdir/grandchild
    # how should a streaming-JSON API indicate fatal error?
    # answer: emit ERROR: instead of a JSON string
    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_broken_manifest(res):
        lines = res.splitlines()
        # NOTE(review): the assignment opening "error_lines = [i" appears
        # to have been elided before the next two lines.
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        # everything before the ERROR: line must still be valid JSON
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessEqual(len(units), 5) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
    d.addCallback(_check_broken_manifest)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    def _check_broken_deepcheck(res):
        # same ERROR:-then-traceback contract as t=stream-manifest above
        lines = res.splitlines()
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessEqual(len(units), 5) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
        r = last_unit["check-results"]["results"]
        self.failUnlessEqual(r["count-recoverable-versions"], 0)
        self.failUnlessEqual(r["count-shares-good"], 1)
        self.failUnlessEqual(r["recoverable"], False)
    d.addCallback(_check_broken_deepcheck)

    d.addErrback(self.explain_web_error)
2903 def test_deep_check_and_repair(self):
2904 self.basedir = "web/Grid/deep_check_and_repair"
2906 c0 = self.g.clients[0]
2910 d = c0.create_empty_dirnode()
2911 def _stash_root_and_create_file(n):
2913 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2914 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2915 d.addCallback(_stash_root_and_create_file)
2916 def _stash_uri(fn, which):
2917 self.uris[which] = fn.get_uri()
2918 d.addCallback(_stash_uri, "good")
2919 d.addCallback(lambda ign:
2920 self.rootnode.add_file(u"small",
2921 upload.Data("literal",
2923 d.addCallback(_stash_uri, "small")
2924 d.addCallback(lambda ign:
2925 self.rootnode.add_file(u"sick",
2926 upload.Data(DATA+"1",
2928 d.addCallback(_stash_uri, "sick")
2929 #d.addCallback(lambda ign:
2930 # self.rootnode.add_file(u"dead",
2931 # upload.Data(DATA+"2",
2933 #d.addCallback(_stash_uri, "dead")
2935 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2936 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2937 #d.addCallback(_stash_uri, "corrupt")
2939 def _clobber_shares(ignored):
2940 good_shares = self.find_shares(self.uris["good"])
2941 self.failUnlessEqual(len(good_shares), 10)
2942 sick_shares = self.find_shares(self.uris["sick"])
2943 os.unlink(sick_shares[0][2])
2944 #dead_shares = self.find_shares(self.uris["dead"])
2945 #for i in range(1, 10):
2946 # os.unlink(dead_shares[i][2])
2948 #c_shares = self.find_shares(self.uris["corrupt"])
2949 #cso = CorruptShareOptions()
2950 #cso.stdout = StringIO()
2951 #cso.parseOptions([c_shares[0][2]])
2953 d.addCallback(_clobber_shares)
2956 # root/good CHK, 10 shares
2958 # root/sick CHK, 9 shares
2960 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
2962 units = [simplejson.loads(line)
2963 for line in res.splitlines()
2965 self.failUnlessEqual(len(units), 4+1)
2966 # should be parent-first
2968 self.failUnlessEqual(u0["path"], [])
2969 self.failUnlessEqual(u0["type"], "directory")
2970 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2971 u0crr = u0["check-and-repair-results"]
2972 self.failUnlessEqual(u0crr["repair-attempted"], False)
2973 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
2975 ugood = [u for u in units
2976 if u["type"] == "file" and u["path"] == [u"good"]][0]
2977 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2978 ugoodcrr = ugood["check-and-repair-results"]
2979 self.failUnlessEqual(u0crr["repair-attempted"], False)
2980 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
2982 usick = [u for u in units
2983 if u["type"] == "file" and u["path"] == [u"sick"]][0]
2984 self.failUnlessEqual(usick["cap"], self.uris["sick"])
2985 usickcrr = usick["check-and-repair-results"]
2986 self.failUnlessEqual(usickcrr["repair-attempted"], True)
2987 self.failUnlessEqual(usickcrr["repair-successful"], True)
2988 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
2989 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
2992 self.failUnlessEqual(stats["type"], "stats")
2994 self.failUnlessEqual(s["count-immutable-files"], 2)
2995 self.failUnlessEqual(s["count-literal-files"], 1)
2996 self.failUnlessEqual(s["count-directories"], 1)
2997 d.addCallback(_done)
2999 d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    """Return [(share_filename, lease_count)] for every share of self.uris[which].

    `ignored` is the previous Deferred result (unused), letting this be
    chained directly with d.addCallback(self._count_leases, "name"). The
    return value feeds _assert_leasecount.

    Fix: the visible code appended to `lease_counts` without ever
    initializing it (NameError) and fell off the end returning None,
    while _assert_leasecount iterates over the return value; restore the
    accumulator initialization and the return statement.
    """
    u = self.uris[which]
    shares = self.find_shares(u)
    lease_counts = []
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        # count leases currently attached to this share file
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
    return lease_counts
def _assert_leasecount(self, lease_counts, expected):
    """Fail unless every entry of lease_counts carries exactly `expected` leases.

    `lease_counts` is the [(share_filename, lease_count)] list produced by
    _count_leases; the filename is included in the failure message.
    """
    for fn, num_leases in lease_counts:
        if num_leases == expected:
            continue
        self.fail("expected %d leases, have %d, on %s" %
                  (expected, num_leases, fn))
def test_add_lease(self):
    """Verify that t=check&add-lease=true adds leases only for new clients.

    Uploads two immutable files and one mutable file, then repeatedly runs
    webapi checks and counts leases on the underlying share files:
    plain t=check adds nothing; add-lease=true from the same client just
    renews the existing lease (count stays 1); add-lease=true from a
    second client (clientnum=1) adds a second lease.

    NOTE(review): this listing has elided lines (discontinuous embedded
    line numbers); in particular the CHECK call near the end is truncated
    -- presumably its elided continuation is "clientnum=1)". Confirm
    against the original file.
    """
    self.basedir = "web/Grid/add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        # ur is an upload results object; remember the resulting cap
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "two")
    def _stash_mutable_uri(n, which):
        # mutable files hand back a node, not upload results
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
    d.addCallback(_stash_mutable_uri, "mutable")

    def _compute_fileurls(ignored):
        # build the webapi URL for every stashed cap
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    # baseline: one lease per object, from the original upload
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "one", "t=check") # no add-lease
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)

    # a plain check must not touch lease counts
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses the original client, which uses the same
    # lease-secrets, so it will just renew the original lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses an alternate client, which adds a second lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # same-client add-lease on the mutable file: still one lease
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # NOTE(review): this call is truncated in the listing; the elided
    # continuation presumably passed clientnum=1 (second client), which
    # would explain the mutable lease count rising to 2 below.
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
def test_deep_add_lease(self):
    """Verify t=stream-deep-check&add-lease=true lease behavior over a tree.

    Builds root/ with an immutable file, a literal file, and a mutable
    file, then runs streaming deep-checks: without add-lease and with
    add-lease from the same client, lease counts stay at 1; with
    add-lease from a second client, every object in the tree gains a
    second lease.

    NOTE(review): the embedded original line numbers are discontinuous --
    some lines are elided, so a few statements below are incomplete.
    """
    self.basedir = "web/Grid/deep_add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    def _stash_root_and_create_file(n):
        # remember the root's cap and URL, then add immutable "one"
        self.uris["root"] = n.get_uri()
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"one", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "one")
    # NOTE(review): this add_file call is truncated in the listing
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")

    d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
    d.addCallback(_stash_uri, "mutable")

    d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
    # NOTE(review): the enclosing "def _done(res):" header for the block
    # below appears to have been elided from this listing.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        # root, one, small, mutable, stats
        self.failUnlessEqual(len(units), 4+1)
    d.addCallback(_done)

    # no add-lease: baseline of one lease everywhere
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
    d.addCallback(_done)

    # same client's lease-secret: add-lease only renews, count stays 1
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # NOTE(review): truncated call; the elided continuation presumably
    # passed clientnum=1, which matches the counts of 2 asserted below.
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
def test_exceptions(self):
    """Verify that webapi error paths produce sensible HTTP responses.

    Sets up: a missing child ("imaginary"), an immutable file reduced to
    1 share, a corrupt-key URI with 0 findable shares, and directories
    with 1 and 0 remaining shares. Then asserts: NotEnoughSharesError →
    410 Gone with an exact text/plain explanation; missing child → 404;
    unrecoverable directory HTML → normal page minus child table/upload
    forms; ?t=json on unrecoverable dirs → 410 with text/plain; and a
    deliberately-exploding resource (ERRORBOOM) → 500 with an HTML
    traceback for Accept:*/* but a plain-text traceback for
    Accept: text/plain.

    NOTE(review): this listing has elided lines (discontinuous embedded
    line numbers); several helper-function headers and call continuations
    below are missing, so the transcription is not runnable as-is.
    """
    self.basedir = "web/Grid/exceptions"
    self.set_up_grid(num_clients=1, num_servers=2)
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    # NOTE(review): the "def _stash_root(n):" header for the next two
    # lines appears to have been elided from this listing.
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
    d.addCallback(_stash_root)
    d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
    # NOTE(review): the "def _stash_bad(ur):" header appears to have been
    # elided before the block below.
        # keep only share #0 so the file is found but unrecoverable (1 < k=3)
        self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
        self.delete_shares_numbered(ur.uri, range(1,10))
        # flipping a bit of the readkey changes the storage index, so no
        # shares at all will be found for this URI
        u = uri.from_string(ur.uri)
        u.key = testutil.flip_bit(u.key, 0)
        baduri = u.to_string()
        self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
    d.addCallback(_stash_bad)
    d.addCallback(lambda ign: c0.create_empty_dirnode())
    def _mangle_dirnode_1share(n):
        # leave one share of this directory: present but unrecoverable
        url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-1share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(1,10))
    d.addCallback(_mangle_dirnode_1share)
    d.addCallback(lambda ign: c0.create_empty_dirnode())
    def _mangle_dirnode_0share(n):
        # delete every share of this directory
        url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-0share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(0,10))
    d.addCallback(_mangle_dirnode_0share)

    # NotEnoughSharesError should be reported sensibly, with a
    # text/plain explanation of the problem, and perhaps some
    # information on which shares *could* be found.

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET unrecoverable",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["0shares"]))
    def _check_zero_shares(body):
        self.failIf("<html>" in body, body)
        # collapse whitespace so the comparison ignores wrapping
        body = " ".join(body.strip().split())
        exp = ("NotEnoughSharesError: no shares could be found. "
               "Zero shares usually indicates a corrupt URI, or that "
               "no servers were connected, but it might also indicate "
               "severe corruption. You should perform a filecheck on "
               "this object to learn more.")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_zero_shares)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET 1share",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["1share"]))
    def _check_one_share(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NotEnoughSharesError: 1 share found, but we need "
               "3 to recover the file. This indicates that some "
               "servers were unavailable, or that shares have been "
               "lost to server departure, hard drive failure, or disk "
               "corruption. You should perform a filecheck on "
               "this object to learn more.")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_one_share)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    def _missing_child(body):
        self.failUnless("No such child: imaginary" in body, body)
    d.addCallback(_missing_child)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
    def _check_0shares_dir_html(body):
        self.failUnless("<html>" in body, body)
        # we should see the regular page, but without the child table or
        body = " ".join(body.strip().split())
        # NOTE(review): this failUnlessIn call is truncated in the listing
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_0shares_dir_html)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
    def _check_1shares_dir_html(body):
        # at some point, we'll split UnrecoverableFileError into 0-shares
        # and some-shares like we did for immutable files (since there
        # are different sorts of advice to offer in each case). For now,
        # they present the same way.
        self.failUnless("<html>" in body, body)
        body = " ".join(body.strip().split())
        # NOTE(review): this failUnlessIn call is truncated in the listing
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_1shares_dir_html)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-0share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-0share-json"]))
    def _check_unrecoverable_file(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-1share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-1share-json"]))
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))

    # attach a webapi child that throws a random error, to test how it
    w = c0.getServiceNamed("webish")
    w.root.putChild("ERRORBOOM", ErrorBoom())

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_html",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM"))
    def _internal_error_html(body):
        # test that a weird exception during a webapi operation with
        # Accept:*/* results in a text/html stack trace, while one
        # without that Accept: line gets us a text/plain stack trace
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["text/plain"]}))
    def _internal_error_text(body):
        # test that a weird exception during a webapi operation with
        # Accept:*/* results in a text/html stack trace, while one
        # without that Accept: line gets us a text/plain stack trace
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text)

    def _flush_errors(res):
        # Trial: please ignore the CompletelyUnhandledError in the logs
        self.flushLoggedErrors(CompletelyUnhandledError)
    d.addBoth(_flush_errors)
3366 class CompletelyUnhandledError(Exception):
class ErrorBoom(rend.Page):
    """A web resource that always blows up, so tests can observe how the
    webapi reports an unexpected internal error."""
    def beforeRender(self, ctx):
        # explode before any rendering happens
        boom = CompletelyUnhandledError("whoops")
        raise boom