1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.immutable import upload, download
13 from allmydata.web import status, common
14 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
15 from allmydata.util import fileutil, base32
16 from allmydata.util.assertutil import precondition
17 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
18 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
19 from allmydata.interfaces import IURI, INewDirectoryURI, \
20 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
21 from allmydata.mutable import servermap, publish, retrieve
22 import common_util as testutil
23 from allmydata.test.no_network import GridTestMixin
25 from allmydata.test.common_web import HTTPClientGETFactory, \
28 # create a fake uploader/downloader, and a couple of fake dirnodes, then
29 # create a webserver that works against them
class FakeIntroducerClient:
    """Stub introducer client; the welcome/status pages query these methods.

    NOTE(review): all three method bodies are elided from this excerpt;
    presumably each returns an empty collection -- confirm against the
    full source.
    """
    def get_all_connectors(self):
        """NOTE(review): body elided from this excerpt."""
    def get_all_connections_for(self, service_name):
        """NOTE(review): body elided from this excerpt."""
    def get_all_peerids(self):
        """NOTE(review): body elided from this excerpt."""
class FakeStatsProvider:
    """Stub stats provider consulted when rendering the statistics page."""
    # NOTE(review): a line is elided between the class header and this
    # statement; this dict is presumably built inside a get_stats() method
    # rather than being a class attribute -- confirm against the full source.
    stats = {'stats': {}, 'counters': {}}
class FakeClient(service.MultiService):
    """In-memory stand-in for the real client node.

    The web frontend under test reads these attributes and methods to render
    pages and satisfy webapi requests, without touching a real grid.
    """
    nodeid = "fake_nodeid"
    nickname = "fake_nickname"
    basedir = "fake_basedir"
    def get_versions(self):
        """Version map shown on the welcome page."""
        # NOTE(review): the remainder of this dict literal is elided from
        # this excerpt.
        return {'allmydata': "fake",
    introducer_furl = "None"
    introducer_client = FakeIntroducerClient()
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    convergence = "some random string"
    stats_provider = FakeStatsProvider()
    def connected_to_introducer(self):
        """NOTE(review): body elided from this excerpt."""
    def get_nickname_for_peerid(self, peerid):
        """NOTE(review): body elided from this excerpt."""
    def get_permuted_peers(self, service_name, key):
        """NOTE(review): body elided from this excerpt."""
    def create_node_from_uri(self, auri):
        """Map a cap string to the matching fake node (dir / CHK / mutable)."""
        precondition(isinstance(auri, str), auri)
        u = uri.from_string(auri)
        if (INewDirectoryURI.providedBy(u)
            or IReadonlyNewDirectoryURI.providedBy(u)):
            return FakeDirectoryNode(self).init_from_uri(u)
        if IFileURI.providedBy(u):
            return FakeCHKFileNode(u, self)
        # anything else must be a mutable-file cap
        assert IMutableFileURI.providedBy(u), u
        return FakeMutableFileNode(self).init_from_uri(u)
    def create_empty_dirnode(self):
        n = FakeDirectoryNode(self)
        # NOTE(review): the line creating deferred `d` (presumably
        # `d = n.create()`) is elided from this excerpt.
        d.addCallback(lambda res: n)
    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
    def create_mutable_file(self, contents=""):
        """Create a fake mutable file; returns whatever n.create() returns."""
        n = FakeMutableFileNode(self)
        return n.create(contents)
    def upload(self, uploadable):
        """Read the uploadable fully and store it as a fake CHK file."""
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # NOTE(review): the `def _got_data(data):` header (and likely a
        # data-joining statement) are elided from this excerpt.
            n = create_chk_filenode(self, data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        """NOTE(review): body elided from this excerpt."""
class WebMixin(object):
    """Test mixin: starts a FakeClient + WebishServer and populates a small
    directory tree (public/foo/..., public/reedownlee/...) for the tests.

    NOTE(review): the `def setUp(self):` header is elided from this excerpt;
    the following statements are its body.
    """
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        s.setServiceParent(self.s)
        # grab the kernel-assigned listening port so tests can build URLs
        self.webish_port = port = s.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port

        l = [ self.s.create_empty_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        # NOTE(review): the header of the callback consuming `res` (and the
        # lines binding `foo` and `rodir` from res[2]/res[5]) are elided
        # from this excerpt.
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()
            foo.set_uri(u"empty", res[3][1].get_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri)
            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri)
            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file)
            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]
            # NOTE(review): `rodir` is bound in an elided line above
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file)
            # resulting tree (partial; some comment lines elided):
            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"
            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
195 def makefile(self, number):
196 contents = "contents of file %s\n" % number
197 n = create_chk_filenode(self.s, contents)
198 return contents, n, n.get_uri()
201 return self.s.stopService()
203 def failUnlessIsBarDotTxt(self, res):
204 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
206 def failUnlessIsBarJSON(self, res):
207 data = simplejson.loads(res)
208 self.failUnless(isinstance(data, list))
209 self.failUnlessEqual(data[0], u"filenode")
210 self.failUnless(isinstance(data[1], dict))
211 self.failIf(data[1]["mutable"])
212 self.failIf("rw_uri" in data[1]) # immutable
213 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
214 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
215 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        """Assert that *res* is the t=json description of the mutable 'foo'
        directory, including its expected children."""
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)

        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      # NOTE(review): the `for (name, value)` line of this
                      # comprehension is elided from this excerpt
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
        # NOTE(review): the trailing argument/continuation of this call is
        # elided from this excerpt.
    def GET(self, urlpath, followRedirect=False, return_response=False,
            # NOTE(review): the **kwargs continuation of this signature is
            # elided from this excerpt.
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        # NOTE(review): the `if return_response:` / `def _got_data(data):`
        # lines (and the binding of `d`) appear elided from this excerpt.
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        # NOTE(review): unlike GET, the factory receives the bare `urlpath`
        # rather than self.webish_url + urlpath -- confirm this is intentional.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
        # NOTE(review): the `if return_response:` / `def _got_data(data):`
        # lines (and the binding of `d`) appear elided from this excerpt.
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
277 def PUT(self, urlpath, data, **kwargs):
278 url = self.webish_url + urlpath
279 return client.getPage(url, method="PUT", postdata=data, **kwargs)
281 def DELETE(self, urlpath):
282 url = self.webish_url + urlpath
283 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        """HTTP POST *fields* as multipart/form-data to *urlpath*.

        A tuple value is treated as (filename, contents) for file-upload
        fields. NOTE(review): several boundary/accumulator lines of the
        form-building code are elided from this excerpt.
        """
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
                # NOTE(review): the `else:` branch header is elided here
                form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
                # NOTE(review): else-branch lines elided here
                assert isinstance(value, str)
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        """addBoth-style check that *res* is a Failure wrapping
        *expected_failure*, optionally matching substrings of the message
        and of the HTTP response body."""
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            # NOTE(review): an `if substring:` guard appears elided here
            self.failUnless(substring in str(res),
                            "substring '%s' not in '%s'"
                            % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
            # NOTE(review): the `else:` header appears elided here
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    def shouldFail2(self, expected_failure, which, substring,
                    # NOTE(review): a `response_substring=None,` parameter
                    # line appears elided from this signature
                    callable, *args, **kwargs):
        """Invoke *callable* and require it to fail with *expected_failure*,
        optionally matching message/response substrings; returns a Deferred."""
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        # NOTE(review): the result-callback header appears elided here
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            # NOTE(review): an `if substring:` guard appears elided here
            self.failUnless(substring in str(res),
                            "%s: substring '%s' not in '%s'"
                            % (which, substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "%s: response substring '%s' not in '%s'"
                                # NOTE(review): `% (which,` line elided here
                                response_substring, res.value.response))
            # NOTE(review): the `else:` header appears elided here
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    def should404(self, res, which):
        """addBoth-style check that *res* is an HTTP 404 error.Error."""
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
            # NOTE(review): the `else:` header appears elided here
            self.fail("%s was supposed to Error(404), not get '%s'" %
            # NOTE(review): the `(which, res))` continuation is elided here
366 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
367 def test_create(self):
    def test_welcome(self):
        """The root page shows the welcome banner (twice, the second time
        after the node's basedir directories exist on disk)."""
        # NOTE(review): the GET("/") call and the `def _check(res):` header
        # appear elided from this excerpt.
            self.failUnless('Welcome To AllMyData' in res)
            self.failUnless('Tahoe' in res)

            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
    def test_provisioning(self):
        """Exercise the /provisioning form with several parameter sets and
        check the rendered space-consumption figures."""
        d = self.GET("/provisioning/")
        # NOTE(review): `def _check(res):` header elided here
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
                      # NOTE(review): further dict entries and the closing
                      # brace are elided from this excerpt
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
        # NOTE(review): `def _check2(res):` header elided here
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)

            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
                      # NOTE(review): further dict entries and the closing
                      # brace are elided from this excerpt
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
        # NOTE(review): `def _check3(res):` header elided here
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
        # NOTE(review): `def _check4(res):` header elided here
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
    def test_reliability_tool(self):
        """Exercise the /reliability form (skipped when NumPy is absent)."""
        # NOTE(review): the `try:` / `except ImportError:` wrapper lines
        # appear elided from this excerpt.
        from allmydata import reliability
        _hush_pyflakes = reliability
        raise unittest.SkipTest("reliability tool requires NumPy")
        d = self.GET("/reliability/")
        # NOTE(review): `def _check(res):` header elided here
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      # NOTE(review): several dict entries elided here
                      "check_period": "1M",
                      "report_period": "3M",
                      # NOTE(review): remaining entries/closing brace elided
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
        # NOTE(review): `def _check2(res):` header elided here
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
    def test_status(self):
        """The /status page lists the fake upload/download/mapupdate/publish/
        retrieve operations, and each per-operation page renders."""
        dl_num = self.s.list_all_download_statuses()[0].get_counter()
        ul_num = self.s.list_all_upload_statuses()[0].get_counter()
        mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = self.s.list_all_publish_statuses()[0].get_counter()
        ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
        # NOTE(review): `def _check(res):` header elided here
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)

        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
        # NOTE(review): `def _check_dl(res):` header elided here
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
        # NOTE(review): `def _check_ul(res):` header elided here
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
510 def test_status_numbers(self):
511 drrm = status.DownloadResultsRendererMixin()
512 self.failUnlessEqual(drrm.render_time(None, None), "")
513 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
514 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
515 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
516 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
517 self.failUnlessEqual(drrm.render_rate(None, None), "")
518 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
519 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
520 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
522 urrm = status.UploadResultsRendererMixin()
523 self.failUnlessEqual(urrm.render_time(None, None), "")
524 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
525 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
526 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
527 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
528 self.failUnlessEqual(urrm.render_rate(None, None), "")
529 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
530 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
531 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        """Plain GET of a file path returns its contents."""
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_range(self):
        """A bounded Range request yields 206 with the matching slice."""
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
        # NOTE(review): trailing addCallback/return lines appear elided.
    def test_GET_FILEURL_partial_range(self):
        """An open-ended Range (bytes=5-) yields 206 with the file tail."""
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
        # NOTE(review): trailing addCallback/return lines appear elided.
    def test_HEAD_FILEURL_range(self):
        """HEAD with a bounded Range yields 206, empty body, content-range."""
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
        # NOTE(review): trailing addCallback/return lines appear elided.
    def test_HEAD_FILEURL_partial_range(self):
        """HEAD with an open-ended Range yields 206 and a tail content-range."""
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
        # NOTE(review): trailing addCallback/return lines appear elided.
    def test_GET_FILEURL_range_bad(self):
        """A syntactically invalid Range header is rejected."""
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             # NOTE(review): the expected-status argument
                             # line is elided from this excerpt
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",
                             # NOTE(review): trailing headers=/return lines
                             # appear elided from this excerpt
    def test_HEAD_FILEURL(self):
        """HEAD returns empty body but full content-length/content-type."""
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        # NOTE(review): trailing addCallback/return lines appear elided.
    def test_GET_FILEURL_named(self):
        """/file/$CAP (and /named/$CAP) serve the file under any appended
        name, including via ?save=true&filename= with a unicode name."""
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_FILEURL_named_bad(self):
        """PUT to a /file/ URL is rejected (GET/HEAD only)."""
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             # NOTE(review): the expected-status argument
                             # line is elided from this excerpt
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_DIRURL_named_bad(self):
        """GET of a directory cap through /file/ is rejected."""
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             # NOTE(review): the expected-status and
                             # message-substring argument lines are elided
                             self.GET, base + "/@@name=/blah.txt")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_slash_file_bad(self):
        """GET of bare /file (no cap) is rejected."""
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             # NOTE(review): the expected-status argument
                             # line is elided from this excerpt
                             "/file must be followed by a file-cap and a name",
                             # NOTE(review): the callable arguments and
                             # trailing `return d` appear elided
    def test_GET_unhandled_URI_named(self):
        """A verify-cap through /file/ is rejected as not a file/dir cap."""
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             # NOTE(review): the expected-status argument
                             # line is elided from this excerpt
                             "is not a valid file- or directory- cap",
                             # NOTE(review): the callable arguments and
                             # trailing `return d` appear elided
    def test_GET_unhandled_URI(self):
        """A verify-cap through /uri/ is rejected as not a file/dir cap."""
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             # NOTE(review): the expected-status argument
                             # line is elided from this excerpt
                             "is not a valid file- or directory- cap",
                             # NOTE(review): the callable arguments and
                             # trailing `return d` appear elided
    def test_GET_FILE_URI(self):
        """GET of /uri/$FILECAP returns the file contents."""
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        # NOTE(review): the `d = self.GET(base)` line appears elided here.
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILE_URI_badchild(self):
        """GET of a child path under a file cap is a 400."""
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             # NOTE(review): the callable arguments and
                             # trailing `return d` appear elided
    def test_PUT_FILE_URI_badchild(self):
        """PUT of a child path under a file cap is a 400."""
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
                             # NOTE(review): the callable arguments and
                             # trailing `return d` appear elided
    def test_GET_FILEURL_save(self):
        """?filename=&save=true still serves the file contents."""
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        d.addCallback(self.failUnlessIsBarDotTxt)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_missing(self):
        """GET of a nonexistent child is a 404."""
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL(self):
        """PUT creates a new immutable child and returns its URI."""
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL_range_bad(self):
        """PUT with Content-Range is a 501 and must not create the child."""
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
                             # NOTE(review): the `headers=headers)` line
                             # appears elided from this excerpt
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL_mutable(self):
        """PUT with ?mutable=true creates a writable mutable child."""
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        # NOTE(review): the `def _check_uri(res):` header appears elided here
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             # NOTE(review):
                                                             # child-name arg
                                                             # elided here
                                                             self.NEWFILE_CONTENTS))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL_mutable_toobig(self):
        """A mutable PUT one byte over MUTABLE_SIZELIMIT is a 413."""
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             # NOTE(review): the `self.PUT,` argument line
                             # appears elided from this excerpt
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL_replace(self):
        """PUT over an existing child replaces its contents by default."""
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL_bad_t(self):
        """PUT with an unknown t= parameter is a 400."""
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
                             # NOTE(review): the contents argument and
                             # trailing `return d` appear elided
    def test_PUT_NEWFILEURL_no_replace(self):
        """PUT with ?replace=false onto an existing child is refused."""
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  # NOTE(review): the expected-status argument line is
                  # elided from this excerpt
                  "There was already a child by that name, and you asked me "
                  # NOTE(review): the string continuation and trailing
                  # `return d` appear elided from this excerpt
    def test_PUT_NEWFILEURL_mkdirs(self):
        """PUT to a path with a missing intermediate dir creates the dir."""
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        # NOTE(review): the line binding `fn` (presumably `fn =
        # self._foo_node`) appears elided from this excerpt.
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_PUT_NEWFILEURL_blocked(self):
        """PUT beneath an existing *file* cannot auto-create the directory."""
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  # NOTE(review): the expected-status argument line is
                  # elided from this excerpt
                  "Unable to create directory 'blockingfile': a file was in the way")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_DELETE_FILEURL(self):
        """DELETE removes the child from its parent directory."""
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_DELETE_FILEURL_missing(self):
        """DELETE of a nonexistent child is a 404."""
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_DELETE_FILEURL_missing2(self):
        """DELETE beneath a nonexistent directory is also a 404."""
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_json(self):
        """GET ?t=json returns the filenode JSON description."""
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_json_missing(self):
        """?json on a nonexistent child is a 404."""
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_uri(self):
        """?t=uri and ?t=readonly-uri both return the file's cap string."""
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
        # NOTE(review): `def _check(res):` header elided here
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
        # NOTE(review): `def _check2(res):` header elided here
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_badtype(self):
        """GET with an unknown t= parameter is a 400."""
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 # NOTE(review): the message-substring and
                                 # callable argument lines are elided here
                                 self.public_url + "/foo/bar.txt?t=bogus")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_FILEURL_uri_missing(self):
        """?t=uri on a nonexistent child is a 404."""
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_DIRURL(self):
        """The directory listing page renders relative links, file rows,
        delete buttons, and readonly markers."""
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        # NOTE(review): the line binding `ROOT` (presumably "../../..")
        # appears elided from this excerpt.
        d = self.GET(self.public_url + "/foo", followRedirect=True)
        # NOTE(review): `def _check(res):` header elided here
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # NOTE(review): the `in res, res)` continuation is elided here
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               # NOTE(review): list elements and the closing
                               # `])` appear partially elided here
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                    # NOTE(review): `break` and the for/else header appear
                    # elided from this excerpt
                self.fail("unable to find delete-bar.txt line", res)
            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td><a href="%s">sub</a></td>' % sub_url)
                       + r'\s+<td>DIR</td>')
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)
        # look at a directory which is readonly
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
        # NOTE(review): `def _check2(res):` header elided here
            self.failUnless("(readonly)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)
        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
        # NOTE(review): `def _check3(res):` header elided here
            self.failUnless(re.search(r'<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a>'
                                      '</td>\s+<td>DIR-RO</td>', res))
        d.addCallback(_check3)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_DIRURL_badtype(self):
        """GET of a directory with an unknown t= parameter is an error."""
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 # NOTE(review): status/message/callable
                                 # argument lines are elided here
                                 self.public_url + "/foo?t=bogus")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_GET_DIRURL_json(self):
        """GET ?t=json on a directory returns the dirnode JSON description."""
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_POST_DIRURL_manifest_no_ophandle(self):
        """t=start-manifest without ophandle= is rejected."""
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_manifest_no_ophandle",
                             # NOTE(review): the expected-status argument
                             # line is elided from this excerpt
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-manifest")
        # NOTE(review): trailing `return d` appears elided from this excerpt.
    def test_POST_DIRURL_manifest(self):
        """t=start-manifest produces html/text/JSON manifests listing every
        path and cap reachable from foo/."""
        d = defer.succeed(None)
        def getman(ignored, output):
            # start the slow operation under ophandle 125 and poll it
            d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
                          # NOTE(review): followRedirect argument elided here
            d.addCallback(self.wait_for_operation, "125")
            d.addCallback(self.get_operation_results, "125", output)
            # NOTE(review): trailing `return d` appears elided here
        d.addCallback(getman, None)
        def _got_html(manifest):
            self.failUnless("Manifest of SI=" in manifest)
            self.failUnless("<td>sub</td>" in manifest)
            self.failUnless(self._sub_uri in manifest)
            self.failUnless("<td>sub/baz.txt</td>" in manifest)
        d.addCallback(_got_html)

        # both t=status and unadorned GET should be identical
        d.addCallback(lambda res: self.GET("/operations/125"))
        d.addCallback(_got_html)
        d.addCallback(getman, "html")
        d.addCallback(_got_html)
        d.addCallback(getman, "text")
        def _got_text(manifest):
            self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
            self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
        d.addCallback(_got_text)
        d.addCallback(getman, "JSON")
        # NOTE(review): the `def _got_json(res):` header and the `got = {}`
        # initializer appear elided from this excerpt.
            data = res["manifest"]
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
            self.failUnless((u"sub",u"baz.txt") in got)
            self.failUnless("finished" in res)
            self.failUnless("origin" in res)
            self.failUnless("storage-index" in res)
            self.failUnless("verifycaps" in res)
            self.failUnless("stats" in res)
        d.addCallback(_got_json)
        # NOTE(review): trailing `return d` appears elided from this excerpt.
def test_POST_DIRURL_deepsize_no_ophandle(self):
    """t=start-deep-size without ophandle= must be rejected with 400."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepsize_no_ophandle",
                         "400 Bad Request",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-size")
    return d
def test_POST_DIRURL_deepsize(self):
    """Deep-size operation reports a plausible total size in JSON and text."""
    d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "126")
    d.addCallback(self.get_operation_results, "126", "json")
    def _got_json(data):
        self.failUnlessEqual(data["finished"], True)
        size = data["size"]
        self.failUnless(size > 1000)
    d.addCallback(_got_json)
    d.addCallback(self.get_operation_results, "126", "text")
    def _got_text(res):
        mo = re.search(r'^size: (\d+)$', res, re.M)
        self.failUnless(mo, res)
        size = int(mo.group(1))
        # with directories, the size varies.
        self.failUnless(size > 1000)
    d.addCallback(_got_text)
    return d
def test_POST_DIRURL_deepstats_no_ophandle(self):
    """t=start-deep-stats without ophandle= must be rejected with 400."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepstats_no_ophandle",
                         "400 Bad Request",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-stats")
    return d
def test_POST_DIRURL_deepstats(self):
    """Deep-stats operation returns the expected per-category counts/sizes."""
    d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "127")
    d.addCallback(self.get_operation_results, "127", "json")
    def _got_json(stats):
        # exact values depend on the fixture tree built in setUp
        expected = {"count-immutable-files": 3,
                    "count-mutable-files": 0,
                    "count-literal-files": 0,
                    "count-files": 3,
                    "count-directories": 3,
                    "size-immutable-files": 57,
                    "size-literal-files": 0,
                    #"size-directories": 1912, # varies
                    #"largest-directory": 1590,
                    "largest-directory-children": 5,
                    "largest-immutable-file": 19,
                    }
        for k,v in expected.iteritems():
            self.failUnlessEqual(stats[k], v,
                                 "stats[%s] was %s, not %s" %
                                 (k, stats[k], v))
        self.failUnlessEqual(stats["size-files-histogram"],
                             [ [11, 31, 3] ])
    d.addCallback(_got_json)
    return d
def test_POST_DIRURL_stream_manifest(self):
    """t=stream-manifest emits newline-separated JSON units ending in stats."""
    d = self.POST(self.public_url + "/foo/?t=stream-manifest")
    def _check(res):
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessEqual(len(units), 7)
        self.failUnlessEqual(units[-1]["type"], "stats")
        first = units[0]
        # the root directory comes first
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(first["cap"], self._foo_uri)
        self.failUnlessEqual(first["type"], "directory")
        baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
        self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
        self.failIfEqual(baz["storage-index"], None)
        self.failIfEqual(baz["verifycap"], None)
        self.failIfEqual(baz["repaircap"], None)
        return res
    d.addCallback(_check)
    return d
def test_GET_DIRURL_uri(self):
    """GET ?t=uri on a directory returns its read-write cap."""
    d = self.GET(self.public_url + "/foo?t=uri")
    def _check(res):
        self.failUnlessEqual(res, self._foo_uri)
    d.addCallback(_check)
    return d
def test_GET_DIRURL_readonly_uri(self):
    """GET ?t=readonly-uri on a directory returns its read-only cap."""
    d = self.GET(self.public_url + "/foo?t=readonly-uri")
    def _check(res):
        self.failUnlessEqual(res, self._foo_readonly_uri)
    d.addCallback(_check)
    return d
def test_PUT_NEWDIRURL(self):
    """PUT ?t=mkdir creates a new empty subdirectory."""
    d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_PUT_NEWDIRURL_exists(self):
    """PUT ?t=mkdir on an existing directory leaves its contents intact."""
    d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
def test_PUT_NEWDIRURL_blocked(self):
    """mkdir through a path blocked by an existing file must 409."""
    d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                         "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                         self.PUT,
                         self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
    # the existing 'sub' directory must be untouched
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
def test_PUT_NEWDIRURL_mkdir_p(self):
    """t=mkdir-p creates intermediate directories and is idempotent."""
    d = defer.succeed(None)
    d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
    d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
    d.addCallback(lambda res: self._foo_node.get(u"mkp"))
    def mkdir_p(mkpnode):
        url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
        d = self.POST(url)
        def made_subsub(ssuri):
            d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
            d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
            # repeating the mkdir-p must return the same directory cap
            d = self.POST(url)
            d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            return d
        d.addCallback(made_subsub)
        return d
    d.addCallback(mkdir_p)
    return d
def test_PUT_NEWDIRURL_mkdirs(self):
    """PUT ?t=mkdir creates missing intermediate directories too."""
    d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
    d.addCallback(lambda res:
                  self._foo_node.get_child_at_path(u"subdir/newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_DELETE_DIRURL(self):
    """DELETE on a directory unlinks it from its parent."""
    d = self.DELETE(self.public_url + "/foo")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    return d
def test_DELETE_DIRURL_missing(self):
    """DELETE of a missing child yields 404 and leaves the parent alone."""
    d = self.DELETE(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"foo"))
    return d
def test_DELETE_DIRURL_missing2(self):
    """DELETE of a missing top-level directory yields 404."""
    d = self.DELETE(self.public_url + "/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    return d
1156 def dump_root(self):
1158 w = webish.DirnodeWalkerMixin()
1159 def visitor(childpath, childnode, metadata):
1161 d = w.walk(self.public_root, visitor)
def failUnlessNodeKeysAre(self, node, expected_keys):
    """Assert that 'node' has exactly the children named in expected_keys."""
    for k in expected_keys:
        assert isinstance(k, unicode)
    d = node.list()
    def _check(children):
        self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
    d.addCallback(_check)
    return d
def failUnlessNodeHasChild(self, node, name):
    """Assert that 'node' has a child called 'name' (unicode)."""
    assert isinstance(name, unicode)
    d = node.list()
    def _check(children):
        self.failUnless(name in children)
    d.addCallback(_check)
    return d
def failIfNodeHasChild(self, node, name):
    """Assert that 'node' has no child called 'name' (unicode)."""
    assert isinstance(name, unicode)
    d = node.list()
    def _check(children):
        self.failIf(name in children)
    d.addCallback(_check)
    return d
def failUnlessChildContentsAre(self, node, name, expected_contents):
    """Assert an immutable child's downloaded contents match exactly."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_to_data())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    return d
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
    """Assert a mutable child's best recoverable version matches exactly."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_best_version())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    return d
def failUnlessChildURIIs(self, node, name, expected_uri):
    """Assert that the named child's URI equals expected_uri (stripped)."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    def _check(child):
        self.failUnlessEqual(child.get_uri(), expected_uri.strip())
    d.addCallback(_check)
    return d
def failUnlessURIMatchesChild(self, got_uri, node, name):
    """Assert that got_uri (stripped) equals the named child's URI."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    def _check(child):
        self.failUnlessEqual(got_uri.strip(), child.get_uri())
    d.addCallback(_check)
    return d
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert the fake-grid contents stored under got_uri match 'contents'."""
    # failUnlessEqual instead of failUnless(... == ...): on failure it
    # reports both values instead of just "false is not true"
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
def test_POST_upload(self):
    """POST t=upload adds a new immutable child with the given contents."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    fn = self._foo_node
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_unicode(self):
    """Upload with a non-ASCII filename; fetch it back via UTF-8 URL."""
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=(filename, self.NEWFILE_CONTENTS))
    fn = self._foo_node
    d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,
                                                        contents))
    return d
def test_POST_upload_unicode_named(self):
    """The name= field overrides the filename in the upload form."""
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    d = self.POST(self.public_url + "/foo", t="upload",
                  name=filename,
                  file=("overridden", self.NEWFILE_CONTENTS))
    fn = self._foo_node
    d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,
                                                        contents))
    return d
def test_POST_upload_no_link(self):
    """Unlinked upload to /uri returns a results page containing the URI."""
    d = self.POST("/uri", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check_upload_results(page):
        # this should be a page which describes the results of the upload
        # that just finished.
        self.failUnless("Upload Results:" in page)
        self.failUnless("URI:" in page)
        uri_re = re.compile("URI: <tt><span>(.*)</span>")
        mo = uri_re.search(page)
        self.failUnless(mo, page)
        new_uri = mo.group(1)
        return new_uri
    d.addCallback(_check_upload_results)
    d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
    return d
def test_POST_upload_no_link_whendone(self):
    """when_done= causes the unlinked upload to redirect afterwards."""
    d = self.POST("/uri", t="upload", when_done="/",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/")
    return d
def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
    """Invoke 'callable' and require it to redirect; pass status/target to 'checker'."""
    d = defer.maybeDeferred(callable, *args, **kwargs)
    def done(res):
        if isinstance(res, failure.Failure):
            # getPage turns an HTTP redirect into a PageRedirect failure
            res.trap(error.PageRedirect)
            statuscode = res.value.status
            target = res.value.location
            return checker(statuscode, target)
        self.fail("%s: callable was supposed to redirect, not return '%s'"
                  % (which, res))
    d.addBoth(done)
    return d
def test_POST_upload_no_link_whendone_results(self):
    """when_done=/uri/%(uri)s interpolates the new URI into the redirect."""
    def check(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.startswith(self.webish_url), target)
        return client.getPage(target, method="GET")
    d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                             check,
                             self.POST, "/uri", t="upload",
                             when_done="/uri/%(uri)s",
                             file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_no_link_mutable(self):
    """Unlinked mutable upload returns a write-cap usable via /uri and /file."""
    d = self.POST("/uri", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check(new_uri):
        new_uri = new_uri.strip()
        self.new_uri = new_uri
        u = IURI(new_uri)
        self.failUnless(IMutableFileURI.providedBy(u))
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(new_uri)
        return n.download_best_version()
    d.addCallback(_check)
    def _check2(data):
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        return self.GET("/uri/%s" % urllib.quote(self.new_uri))
    d.addCallback(_check2)
    def _check3(data):
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        return self.GET("/file/%s" % urllib.quote(self.new_uri))
    d.addCallback(_check3)
    def _check4(data):
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
    d.addCallback(_check4)
    return d
def test_POST_upload_no_link_mutable_toobig(self):
    """Unlinked mutable upload above MUTABLE_SIZELIMIT must yield 413."""
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_no_link_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.POST,
                         "/uri", t="upload", mutable="true",
                         file=("new.txt",
                               "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    return d
1353 def test_POST_upload_mutable(self):
1354 # this creates a mutable file
1355 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1356 file=("new.txt", self.NEWFILE_CONTENTS))
1358 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1359 d.addCallback(lambda res:
1360 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1361 self.NEWFILE_CONTENTS))
1362 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1364 self.failUnless(IMutableFileNode.providedBy(newnode))
1365 self.failUnless(newnode.is_mutable())
1366 self.failIf(newnode.is_readonly())
1367 self._mutable_node = newnode
1368 self._mutable_uri = newnode.get_uri()
1371 # now upload it again and make sure that the URI doesn't change
1372 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1373 d.addCallback(lambda res:
1374 self.POST(self.public_url + "/foo", t="upload",
1376 file=("new.txt", NEWER_CONTENTS)))
1377 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1378 d.addCallback(lambda res:
1379 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1381 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1383 self.failUnless(IMutableFileNode.providedBy(newnode))
1384 self.failUnless(newnode.is_mutable())
1385 self.failIf(newnode.is_readonly())
1386 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1387 d.addCallback(_got2)
1389 # upload a second time, using PUT instead of POST
1390 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1391 d.addCallback(lambda res:
1392 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1393 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1394 d.addCallback(lambda res:
1395 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1398 # finally list the directory, since mutable files are displayed
1399 # slightly differently
1401 d.addCallback(lambda res:
1402 self.GET(self.public_url + "/foo/",
1403 followRedirect=True))
1404 def _check_page(res):
1405 # TODO: assert more about the contents
1406 self.failUnless("SSK" in res)
1408 d.addCallback(_check_page)
1410 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1412 self.failUnless(IMutableFileNode.providedBy(newnode))
1413 self.failUnless(newnode.is_mutable())
1414 self.failIf(newnode.is_readonly())
1415 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1416 d.addCallback(_got3)
1418 # look at the JSON form of the enclosing directory
1419 d.addCallback(lambda res:
1420 self.GET(self.public_url + "/foo/?t=json",
1421 followRedirect=True))
1422 def _check_page_json(res):
1423 parsed = simplejson.loads(res)
1424 self.failUnlessEqual(parsed[0], "dirnode")
1425 children = dict( [(unicode(name),value)
1427 in parsed[1]["children"].iteritems()] )
1428 self.failUnless("new.txt" in children)
1429 new_json = children["new.txt"]
1430 self.failUnlessEqual(new_json[0], "filenode")
1431 self.failUnless(new_json[1]["mutable"])
1432 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1433 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1434 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1435 d.addCallback(_check_page_json)
1437 # and the JSON form of the file
1438 d.addCallback(lambda res:
1439 self.GET(self.public_url + "/foo/new.txt?t=json"))
1440 def _check_file_json(res):
1441 parsed = simplejson.loads(res)
1442 self.failUnlessEqual(parsed[0], "filenode")
1443 self.failUnless(parsed[1]["mutable"])
1444 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1445 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1446 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1447 d.addCallback(_check_file_json)
1449 # and look at t=uri and t=readonly-uri
1450 d.addCallback(lambda res:
1451 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1452 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1453 d.addCallback(lambda res:
1454 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1455 def _check_ro_uri(res):
1456 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1457 self.failUnlessEqual(res, ro_uri)
1458 d.addCallback(_check_ro_uri)
1460 # make sure we can get to it from /uri/URI
1461 d.addCallback(lambda res:
1462 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1463 d.addCallback(lambda res:
1464 self.failUnlessEqual(res, NEW2_CONTENTS))
1466 # and that HEAD computes the size correctly
1467 d.addCallback(lambda res:
1468 self.HEAD(self.public_url + "/foo/new.txt",
1469 return_response=True))
1470 def _got_headers((res, status, headers)):
1471 self.failUnlessEqual(res, "")
1472 self.failUnlessEqual(headers["content-length"][0],
1473 str(len(NEW2_CONTENTS)))
1474 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1475 d.addCallback(_got_headers)
1477 # make sure that size errors are displayed correctly for overwrite
1478 d.addCallback(lambda res:
1479 self.shouldFail2(error.Error,
1480 "test_POST_upload_mutable-toobig",
1481 "413 Request Entity Too Large",
1482 "SDMF is limited to one segment, and 10001 > 10000",
1484 self.public_url + "/foo", t="upload",
1487 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1490 d.addErrback(self.dump_error)
def test_POST_upload_mutable_toobig(self):
    """Linked mutable upload above MUTABLE_SIZELIMIT must yield 413."""
    # NOTE: the 'which' label previously said "..._no_link_..." — a
    # copy-paste from the unlinked variant; use this test's own name
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.POST,
                         self.public_url + "/foo",
                         t="upload", mutable="true",
                         file=("new.txt",
                               "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    return d
1505 def dump_error(self, f):
1506 # if the web server returns an error code (like 400 Bad Request),
1507 # web.client.getPage puts the HTTP response body into the .response
1508 # attribute of the exception object that it gives back. It does not
1509 # appear in the Failure's repr(), so the ERROR that trial displays
1510 # will be rather terse and unhelpful. addErrback this method to the
1511 # end of your chain to get more information out of these errors.
1512 if f.check(error.Error):
1513 print "web.error.Error:"
1515 print f.value.response
def test_POST_upload_replace(self):
    """Uploading over an existing child replaces its contents by default."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    fn = self._foo_node
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_no_replace_ok(self):
    """replace=false is fine when the target name does not exist yet."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessEqual(res,
                                                   self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_no_replace_queryarg(self):
    """?replace=false on an existing child must 409 and leave it alone."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error,
              "POST_upload_no_replace_queryarg",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_upload_no_replace_field(self):
    """replace=false as a form field must 409 and leave the child alone."""
    d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_upload_whendone(self):
    """when_done= redirects after a linked upload; the child is still added."""
    d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/THERE")
    fn = self._foo_node
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_named(self):
    """Upload with name= and a bare file body (no filename) works."""
    fn = self._foo_node
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="new.txt", file=self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_named_badfilename(self):
    """A name= containing slashes must 400 and add nothing."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_upload_named_badfilename",
              "400 Bad Request",
              "name= may not contain a slash",
              )
    # make sure that nothing was added
    d.addCallback(lambda res:
                  self.failUnlessNodeKeysAre(self._foo_node,
                                             [u"bar.txt", u"blockingfile",
                                              u"empty", u"n\u00fc.txt",
                                              u"sub"]))
    return d
def test_POST_FILEURL_check(self):
    """t=check on a file: HTML result, when_done redirect, return_to link,
    and JSON output."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check")
    def _check(res):
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check",
                                       _check2,
                                       self.POST, bar_url,
                                       t="check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to parent directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)

    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
    return d
def test_POST_FILEURL_check_and_repair(self):
    """t=check&repair=true on a file: result page, redirect, return_to link."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check", repair="true")
    def _check(res):
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                       _check2,
                                       self.POST, bar_url,
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to parent directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    return d
def test_POST_DIRURL_check(self):
    """t=check on a directory: HTML, redirect, return_to, and JSON output."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check")
    def _check(res):
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check",
                                       _check2,
                                       self.POST, foo_url,
                                       t="check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res, res)
        self.failUnless("Return to parent directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)

    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
    return d
def test_POST_DIRURL_check_and_repair(self):
    """t=check&repair=true on a directory: result page, redirect, return_to."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check", repair="true")
    def _check(res):
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                       _check2,
                                       self.POST, foo_url,
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to parent directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    return d
def wait_for_operation(self, ignored, ophandle):
    """Poll /operations/<ophandle> until the operation reports finished.

    Returns the decoded JSON status dict.
    """
    url = "/operations/" + ophandle
    url += "?t=status&output=JSON"
    d = self.GET(url)
    def _got(res):
        data = simplejson.loads(res)
        if not data["finished"]:
            # not done yet: stall briefly and poll again
            d = self.stall(delay=1.0)
            d.addCallback(self.wait_for_operation, ophandle)
            return d
        return data
    d.addCallback(_got)
    return d
def get_operation_results(self, ignored, ophandle, output=None):
    """Fetch an operation's results; decode JSON when output='json'."""
    url = "/operations/" + ophandle
    url += "?t=status"
    if output:
        url += "&output=" + output
    d = self.GET(url)
    def _got(res):
        if output and output.lower() == "json":
            return simplejson.loads(res)
        return res
    d.addCallback(_got)
    return d
def test_POST_DIRURL_deepcheck_no_ophandle(self):
    """t=start-deep-check without ophandle= must be rejected with 400."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepcheck_no_ophandle",
                         "400 Bad Request",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-check")
    return d
def test_POST_DIRURL_deepcheck(self):
    """Deep-check: redirect to the ophandle, then JSON/HTML results and
    per-storage-index detail pages."""
    def _check_redirect(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.endswith("/operations/123"))
    d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                             self.POST, self.public_url,
                             t="start-deep-check", ophandle="123")
    d.addCallback(self.wait_for_operation, "123")
    def _check_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnlessEqual(data["count-objects-checked"], 8)
        self.failUnlessEqual(data["count-objects-healthy"], 8)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "123", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy: <span>8</span>" in res)
    d.addCallback(_check_html)

    d.addCallback(lambda res:
                  self.GET("/operations/123/"))
    d.addCallback(_check_html) # should be the same as without the slash

    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "one", "404 Not Found",
                                   "No detailed results for SI bogus",
                                   self.GET, "/operations/123/bogus"))

    foo_si = self._foo_node.get_storage_index()
    foo_si_s = base32.b2a(foo_si)
    d.addCallback(lambda res:
                  self.GET("/operations/123/%s?output=JSON" % foo_si_s))
    def _check_foo_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data["storage-index"], foo_si_s)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_foo_json)
    return d
def test_POST_DIRURL_deepcheck_and_repair(self):
    """Deep-check with repair on a healthy tree: zero repairs in JSON/HTML."""
    d = self.POST(self.public_url, t="start-deep-check", repair="true",
                  ophandle="124", output="json", followRedirect=True)
    d.addCallback(self.wait_for_operation, "124")
    def _check_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnlessEqual(data["count-objects-checked"], 8)
        self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
        self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
        self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
        self.failUnlessEqual(data["count-repairs-attempted"], 0)
        self.failUnlessEqual(data["count-repairs-successful"], 0)
        self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
        self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
        self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
        self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "124", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)

        self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)

        self.failUnless("Repairs Attempted: <span>0</span>" in res)
        self.failUnless("Repairs Successful: <span>0</span>" in res)
        self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)

        self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
    d.addCallback(_check_html)
    return d
def test_POST_FILEURL_bad_t(self):
    """POST with an unknown t= on a file must yield 400."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to file: bad t=bogus",
                         self.POST, self.public_url + "/foo/bar.txt",
                         t="bogus")
    return d
def test_POST_mkdir(self): # return value?
    """POST t=mkdir with name= creates a new empty subdirectory."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_2(self):
    """POST to <child>?t=mkdir creates the named empty subdirectory."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdirs_2(self):
    """POST ?t=mkdir creates intermediate directories as needed."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_no_parentdir_noredirect(self):
    """Unlinked POST /uri?t=mkdir returns a parseable directory cap."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # raises if the response is not a valid DIR2 cap
        uri.NewDirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    return d
def test_POST_mkdir_no_parentdir_redirect(self):
    """redirect_to_result=true answers 303 pointing at the new directory."""
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    return d
def test_POST_noparent_bad(self):
    """POST /uri with an unknown t= must yield 400 with a helpful message."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
    return d
def test_welcome_page_mkdir_button(self):
    """Scrape the welcome page's mkdir form and submit it; expect a 303."""
    # Fetch the welcome page.
    d = self.GET("/")
    def _after_get_welcome_page(res):
        MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create Directory!" />', re.I)
        mo = MKDIR_BUTTON_RE.search(res)
        formaction = mo.group(1)
        formt = mo.group(2)
        formaname = mo.group(3)
        formavalue = mo.group(4)
        return (formaction, formt, formaname, formavalue)
    d.addCallback(_after_get_welcome_page)
    def _after_parse_form(res):
        (formaction, formt, formaname, formavalue) = res
        return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
    d.addCallback(_after_parse_form)
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    return d
def test_POST_mkdir_replace(self): # return value?
    """mkdir over an existing directory replaces it with an empty one."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_no_replace_queryarg(self): # return value?
    """?replace=false mkdir over an existing child must 409, keep contents."""
    d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
    d.addBoth(self.shouldFail, error.Error,
              "POST_mkdir_no_replace_queryarg",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
def test_POST_mkdir_no_replace_field(self): # return value?
    """Same as the queryarg variant, but replace=false as a form field."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
                  replace="false")  # NOTE(review): reconstructed missing line
    d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
              "409 Conflict",  # NOTE(review): reconstructed missing argument
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
def test_POST_mkdir_whendone_field(self):
    """A when_done= form field should redirect there after the mkdir."""
    d = self.POST(self.public_url + "/foo",
                  t="mkdir", name="newdir", when_done="/THERE")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d  # NOTE(review): reconstructed return
def test_POST_mkdir_whendone_queryarg(self):
    """A when_done= query argument should redirect there after the mkdir."""
    d = self.POST(self.public_url + "/foo?when_done=/THERE",
                  t="mkdir", name="newdir")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d  # NOTE(review): reconstructed return
def test_POST_bad_t(self):
    """POST to a directory with an unknown t= gives 400 Bad Request."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to a directory with bad t=BOGUS",
                         self.POST, self.public_url + "/foo", t="BOGUS")
    return d  # NOTE(review): reconstructed return
def test_POST_set_children(self):
    """POST t=set_children should atomically link several children,
    described by a JSON request body, into the directory."""
    contents9, n9, newuri9 = self.makefile(9)
    contents10, n10, newuri10 = self.makefile(10)
    contents11, n11, newuri11 = self.makefile(11)

    # NOTE(review): parts of this JSON template were lost in extraction;
    # the "size"/"metadata" framing and closing brackets are reconstructed
    # around the surviving rw_uri/ctime/mtime lines -- confirm against the
    # upstream copy of this test.
    reqbody = """{
                 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
                                                   "size": 0,
                                                   "metadata": {
                                                     "ctime": 1002777696.7564139,
                                                     "mtime": 1002777696.7564139
                                                   }
                                                 } ],
                 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
                                                   "size": 1,
                                                   "metadata": {
                                                     "ctime": 1002777696.7564139,
                                                     "mtime": 1002777696.7564139
                                                   }
                                                 } ],
                 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
                                                   "size": 2,
                                                   "metadata": {
                                                     "ctime": 1002777696.7564139,
                                                     "mtime": 1002777696.7564139
                                                   }
                                                 } ]
                 }""" % (newuri9, newuri10, newuri11)

    url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
    d = client.getPage(url, method="POST", postdata=reqbody)
    def _then(res):
        # NOTE(review): reconstructed closure header (lost in extraction)
        self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
        self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
        self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
    d.addCallback(_then)
    d.addErrback(self.dump_error)
    return d
def test_POST_put_uri(self):
    """POST t=uri attaches an existing file-cap as a new child."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  contents))
    return d  # NOTE(review): closing arg + return reconstructed
def test_POST_put_uri_replace(self):
    """POST t=uri over an existing child replaces its contents."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                  contents))
    return d  # NOTE(review): closing arg + return reconstructed
def test_POST_put_uri_no_replace_queryarg(self):
    """?replace=false must refuse to overwrite bar.txt via t=uri (409)."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo?replace=false", t="uri",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_put_uri_no_replace_queryarg",
              "409 Conflict",  # NOTE(review): reconstructed missing argument
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_put_uri_no_replace_field(self):
    """replace=false as a form field must also refuse the overwrite."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", replace="false",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_put_uri_no_replace_field",
              "409 Conflict",  # NOTE(review): reconstructed missing argument
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_delete(self):
    """POST t=delete should unlink the named child."""
    d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
    d.addCallback(lambda res: self._foo_node.list())
    def _check(children):
        self.failIf(u"bar.txt" in children)
    d.addCallback(_check)
    return d  # NOTE(review): reconstructed return
def test_POST_rename_file(self):
    """POST t=rename moves a child to a new name in the same directory."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='wibble.txt')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    return d  # NOTE(review): reconstructed return
def test_POST_rename_file_redundant(self):
    """Renaming a child to its own name must be a harmless no-op."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='bar.txt')
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    return d  # NOTE(review): reconstructed return
def test_POST_rename_file_replace(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"empty"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    return d  # NOTE(review): reconstructed return
def test_POST_rename_file_no_replace_queryarg(self):
    # rename a file and replace a directory with it; ?replace=false
    # should make that fail (409) and leave the directory intact.
    d = self.POST(self.public_url + "/foo?replace=false", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_queryarg",
              "409 Conflict",  # NOTE(review): reconstructed missing argument
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
    return d
def test_POST_rename_file_no_replace_field(self):
    # rename a file and replace a directory with it; replace=false as a
    # form field should make that fail (409) as well.
    d = self.POST(self.public_url + "/foo", t="rename", replace="false",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_field",
              "409 Conflict",  # NOTE(review): reconstructed missing argument
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
    return d
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json encoding of an empty directory."""
    data = simplejson.loads(res)
    self.failUnlessEqual(data[0], "dirnode", data)
    self.failUnlessEqual(len(data[1]["children"]), 0)
def test_POST_rename_file_slash_fail(self):
    """A to_name= containing a slash must be rejected, leaving the
    original child in place."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='kirk/spock.txt')
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_rename_file_slash_fail",
              "400 Bad Request",  # NOTE(review): reconstructed status text
              "to_name= may not contain a slash",
              )
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    return d  # NOTE(review): reconstructed return
def test_POST_rename_dir(self):
    """t=rename works for directory children too."""
    d = self.POST(self.public_url, t="rename",
                  from_name="foo", to_name='plunk')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"plunk"))
    d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
    d.addCallback(self.failUnlessIsFooJSON)
    return d  # NOTE(review): reconstructed return
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """ If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished with
    that HTTP status code."""
    if not isinstance(res, failure.Failure):
        to_where = (target is None) and "somewhere" or ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        realtarget = self.webish_url + target
        self.failUnlessEqual(res.value.location, realtarget,
                             "%s: wrong target" % which)
    # hand the redirect location to the next callback in the chain
    return res.value.location
def test_GET_URI_form(self):
    """GET /uri?uri=$CAP should redirect to /uri/$CAP, forwarding extra
    query arguments like filename= and t=."""
    base = "/uri?uri=%s" % self._bar_txt_uri
    # this is supposed to give us a redirect to /uri/$URI, plus arguments
    targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
    d = self.GET(base)  # NOTE(review): reconstructed missing line
    d.addBoth(self.shouldRedirect, targetbase)
    d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
    d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
    d.addCallback(lambda res: self.GET(base+"&t=json"))
    d.addBoth(self.shouldRedirect, targetbase+"?t=json")
    d.addCallback(self.log, "about to get file by uri")
    d.addCallback(lambda res: self.GET(base, followRedirect=True))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(self.log, "got file by uri, about to get dir by uri")
    d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
                                       followRedirect=True))
    d.addCallback(self.failUnlessIsFooJSON)
    d.addCallback(self.log, "got dir by uri")
    return d  # NOTE(review): reconstructed return
def test_GET_URI_form_bad(self):
    """GET /uri without a uri= argument should give 400 Bad Request."""
    d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                         "400 Bad Request", "GET /uri requires uri=",
                         self.GET, "/uri")
    return d  # NOTE(review): call tail + return reconstructed
def test_GET_rename_form(self):
    """t=rename-form must serve a form pre-filled with the child name."""
    d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
                 followRedirect=True)
    def _check(res):
        # NOTE(review): reconstructed closure header (lost in extraction)
        self.failUnless('name="when_done" value="."' in res, res)
        self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
    d.addCallback(_check)
    return d
def log(self, res, msg):
    """Deferred-chain helper: log *msg* and pass *res* through unchanged."""
    #print "MSG: %s RES: %s" % (msg, res)
    # NOTE(review): body tail lost in extraction; reconstructed as the
    # conventional log-and-passthrough -- confirm against upstream.
    log.msg(msg)
    return res
def test_GET_URI_URL(self):
    """GET /uri/$FILECAP returns the file contents; filename= and save=
    query arguments must not change the body."""
    base = "/uri/%s" % self._bar_txt_uri
    d = self.GET(base)  # NOTE(review): reconstructed missing line
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_GET_URI_URL_dir(self):
    """GET /uri/$DIRCAP?t=json returns the directory's JSON encoding."""
    base = "/uri/%s?t=json" % self._foo_uri
    d = self.GET(base)  # NOTE(review): reconstructed missing line
    d.addCallback(self.failUnlessIsFooJSON)
    return d
def test_GET_URI_URL_missing(self):
    """Fetching a cap with no retrievable shares should report 410 GONE."""
    base = "/uri/%s" % self._bad_file_uri
    d = self.shouldHTTPError("test_GET_URI_URL_missing",
                             http.GONE, None, "NotEnoughSharesError",
                             self.GET, base)
    # TODO: how can we exercise both sides of WebDownloadTarget.fail
    # here? we must arrange for a download to fail after target.open()
    # has been called, and then inspect the response to see that it is
    # shorter than we expected.
    return d  # NOTE(review): call tail + return reconstructed
def test_PUT_DIRURL_uri(self):
    """PUT dir?t=uri replaces /foo with a freshly created empty dir."""
    d = self.s.create_empty_dirnode()
    def _made_dir(dn):
        # NOTE(review): reconstructed closure header (lost in extraction)
        new_uri = dn.get_uri()
        # replace /foo with a new (empty) directory
        d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
        d.addCallback(lambda res:
                      self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildURIIs(self.public_root,
                                                u"foo",
                                                new_uri))
        return d
    d.addCallback(_made_dir)
    return d
def test_PUT_DIRURL_uri_noreplace(self):
    """PUT dir?t=uri&replace=false must fail (409) and keep /foo as-is."""
    d = self.s.create_empty_dirnode()
    def _made_dir(dn):
        # NOTE(review): reconstructed closure header (lost in extraction)
        new_uri = dn.get_uri()
        # replace /foo with a new (empty) directory, but ask that
        # replace=false, so it should fail
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
                             "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
                             self.PUT,
                             self.public_url + "/foo?t=uri&replace=false",
                             new_uri)
        d.addCallback(lambda res:
                      self.failUnlessChildURIIs(self.public_root,
                                                u"foo",
                                                self._foo_uri))
        return d
    d.addCallback(_made_dir)
    return d
def test_PUT_DIRURL_bad_t(self):
    """PUT to a directory with an unknown t= gives 400 and changes nothing."""
    d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                         "400 Bad Request", "PUT to a directory",
                         self.PUT, self.public_url + "/foo?t=BOGUS", "")
    d.addCallback(lambda res:
                  self.failUnlessChildURIIs(self.public_root,
                                            u"foo",
                                            self._foo_uri))
    return d  # NOTE(review): closing args + return reconstructed
def test_PUT_NEWFILEURL_uri(self):
    """PUT child?t=uri attaches an existing cap under a new child name."""
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
    d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  contents))
    return d  # NOTE(review): closing arg + return reconstructed
def test_PUT_NEWFILEURL_uri_replace(self):
    """PUT child?t=uri over an existing child replaces its contents."""
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
    d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                  contents))
    return d  # NOTE(review): closing arg + return reconstructed
def test_PUT_NEWFILEURL_uri_no_replace(self):
    """replace=false must refuse to overwrite an existing child (409)."""
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
              "409 Conflict",  # NOTE(review): reconstructed missing argument
              "There was already a child by that name, and you asked me "
              "to not replace it")
    return d  # NOTE(review): reconstructed return
def test_PUT_NEWFILE_URI(self):
    """PUT /uri uploads an immutable file and returns its cap, which can
    then be fetched back via GET /uri/$CAP."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri", file_contents)
    def _check(uri):
        # NOTE(review): reconstructed closure header (lost in extraction)
        assert isinstance(uri, str), uri
        self.failUnless(uri in FakeCHKFileNode.all_contents)
        self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
                             file_contents)
        return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    def _check2(res):
        self.failUnlessEqual(res, file_contents)
    d.addCallback(_check2)
    return d
def test_PUT_NEWFILE_URI_only_PUT(self):
    """PUT /uri with an unknown t= value must be rejected."""
    d = self.PUT("/uri?t=bogus", "")
    d.addBoth(self.shouldFail, error.Error,
              "PUT_NEWFILE_URI_only_PUT",
              "400 Bad Request",  # NOTE(review): reconstructed missing argument
              "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
    return d  # NOTE(review): reconstructed return
def test_PUT_NEWFILE_URI_mutable(self):
    """PUT /uri?mutable=true creates a mutable file holding the body."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=true", file_contents)
    def _check_mutable(uri):
        # NOTE(review): the lines stripping the cap and adapting it to
        # IURI were lost in extraction; reconstructed here.
        uri = uri.strip()
        u = IURI(uri)
        self.failUnless(IMutableFileURI.providedBy(u))
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(uri)
        return n.download_best_version()
    d.addCallback(_check_mutable)
    def _check2_mutable(data):
        self.failUnlessEqual(data, file_contents)
    d.addCallback(_check2_mutable)
    return d  # NOTE(review): reconstructed return
2329 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2330 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2332 return self.GET("/uri/%s" % uri)
2333 d.addCallback(_check)
2335 self.failUnlessEqual(res, file_contents)
2336 d.addCallback(_check2)
def test_PUT_mkdir(self):
    """PUT /uri?t=mkdir creates an unlinked empty directory."""
    d = self.PUT("/uri?t=mkdir", "")
    def _check(uri):
        # NOTE(review): reconstructed closure header (lost in extraction)
        n = self.s.create_node_from_uri(uri.strip())
        d2 = self.failUnlessNodeKeysAre(n, [])
        d2.addCallback(lambda res:
                       self.GET("/uri/%s?t=json" % uri))
        return d2
    d.addCallback(_check)
    d.addCallback(self.failUnlessIsEmptyJSON)
    return d
def test_POST_check(self):
    """POST t=check on a child should at least not error."""
    d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
    def _done(res):
        # this returns a string form of the results, which are probably
        # None since we're using fake filenodes.
        # TODO: verify that the check actually happened, by changing
        # FakeCHKFileNode to count how many times .check() has been
        # called.
        pass
    d.addCallback(_done)
    return d  # NOTE(review): closure header + return reconstructed
def test_bad_method(self):
    """An unknown HTTP method must yield 501 Not Implemented."""
    url = self.webish_url + self.public_url + "/foo/bar.txt"
    d = self.shouldHTTPError("test_bad_method",
                             501, "Not Implemented",
                             "I don't know how to treat a BOGUS request.",
                             client.getPage, url, method="BOGUS")
    return d  # NOTE(review): reconstructed return
def test_short_url(self):
    """DELETE on the bare /uri resource must yield 501 Not Implemented."""
    url = self.webish_url + "/uri"
    d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                             "I don't know how to treat a DELETE request.",
                             client.getPage, url, method="DELETE")
    return d  # NOTE(review): reconstructed return
def test_ophandle_bad(self):
    """Querying an unknown operation handle must give 404."""
    url = self.webish_url + "/operations/bogus?t=status"
    d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                             "unknown/expired handle 'bogus'",
                             client.getPage, url)
    return d  # NOTE(review): reconstructed return
def test_ophandle_cancel(self):
    """t=cancel should mark the operation cancelled and forget the handle."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/128?t=status&output=JSON"))
    def _check1(res):
        # NOTE(review): reconstructed closure header (lost in extraction)
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
        monitor = self.ws.root.child_operations.handles["128"][0]
        d = self.POST("/operations/128?t=cancel&output=JSON")
        def _check2(res):
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            # t=cancel causes the handle to be forgotten
            self.failUnless(monitor.is_cancelled())
        d.addCallback(_check2)
        return d
    d.addCallback(_check1)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_cancel",
                                       404, "404 Not Found",
                                       "unknown/expired handle '128'",
                                       self.GET,
                                       "/operations/128?t=status&output=JSON"))
    return d
def test_ophandle_retainfor(self):
    """retain-for=0 should expire the handle shortly after it is queried."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
    def _check1(res):
        # NOTE(review): reconstructed closure header (lost in extraction)
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # the retain-for=0 will cause the handle to be expired very soon
    d.addCallback(self.stall, 2.0)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_retainfor",
                                       404, "404 Not Found",
                                       "unknown/expired handle '129'",
                                       self.GET,
                                       "/operations/129?t=status&output=JSON"))
    return d
def test_ophandle_release_after_complete(self):
    """release-after-complete=true should drop the handle once done."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "130")
    d.addCallback(lambda ignored:
                  self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
    # the release-after-complete=true will cause the handle to be expired
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_release_after_complete",
                                       404, "404 Not Found",
                                       "unknown/expired handle '130'",
                                       self.GET,
                                       "/operations/130?t=status&output=JSON"))
    return d  # NOTE(review): self.GET arg + return reconstructed
def test_incident(self):
    """POST /report_incident should acknowledge the report."""
    d = self.POST("/report_incident", details="eek")
    def _done(res):
        # NOTE(review): reconstructed closure header (lost in extraction)
        self.failUnless("Thank you for your report!" in res, res)
    d.addCallback(_done)
    return d
def test_static(self):
    """Files placed under the static directory are served at /static/."""
    webdir = os.path.join(self.staticdir, "subdir")
    fileutil.make_dirs(webdir)
    f = open(os.path.join(webdir, "hello.txt"), "wb")
    # NOTE(review): the write/close lines were lost in extraction; the
    # payload "hello" is inferred from the assertion below.
    f.write("hello")
    f.close()

    d = self.GET("/static/subdir/hello.txt")
    def _check(res):
        self.failUnlessEqual(res, "hello")
    d.addCallback(_check)
    return d
class Util(unittest.TestCase):
    """Unit tests for the small formatting helpers used by the web UI."""

    def test_abbreviate_time(self):
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        def convert(s):
            # NOTE(review): reconstructed def line (lost in extraction)
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
        def convert2(s):
            # NOTE(review): reconstructed def line (lost in extraction)
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2500 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """Deferred-chain helper: hit the stashed URL for *which* with the
    extra query *args*, using method=POST against client *clientnum*."""
    fileurl = self.fileurls[which]
    url = fileurl + "?" + args
    return self.GET(url, method="POST", clientnum=clientnum)
def test_filecheck(self):
    """Exercise t=check (html and json) against healthy, literal, sick,
    dead, and corrupt files on a real (no-network) grid."""
    self.basedir = "web/Grid/filecheck"
    self.set_up_grid()   # NOTE(review): reconstructed missing setup line
    c0 = self.g.clients[0]
    self.uris = {}       # NOTE(review): reconstructed missing line
    DATA = "data" * 100  # NOTE(review): reconstructed test payload -- confirm
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data("literal", convergence="")))
    d.addCallback(_stash_uri, "small")

    def _compute_fileurls(ignored):
        self.fileurls = {}  # NOTE(review): reconstructed missing line
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        corrupt_share(cso)  # NOTE(review): reconstructed missing line
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)
    d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
    def _got_html_good_return_to(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to parent directory'
                        in res, res)
    d.addCallback(_got_html_good_return_to)
    d.addCallback(self.CHECK, "good", "t=check&output=json")
    def _got_json_good(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"], "Healthy")
        self.failUnless(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_good)

    d.addCallback(self.CHECK, "small", "t=check")
    def _got_html_small(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_small)
    d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
    def _got_html_small_return_to(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to parent directory'
                        in res, res)
    d.addCallback(_got_html_small_return_to)
    d.addCallback(self.CHECK, "small", "t=check&output=json")
    def _got_json_small(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_small)

    d.addCallback(self.CHECK, "sick", "t=check")
    def _got_html_sick(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_sick)
    d.addCallback(self.CHECK, "sick", "t=check&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_sick)

    d.addCallback(self.CHECK, "dead", "t=check")
    def _got_html_dead(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_dead)
    d.addCallback(self.CHECK, "dead", "t=check&output=json")
    def _got_json_dead(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 1 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failIf(r["results"]["recoverable"])
    d.addCallback(_got_json_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
    def _got_html_corrupt(res):
        self.failUnless("Not Healthy! : Unhealthy" in res, res)
    d.addCallback(_got_html_corrupt)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
    def _got_json_corrupt(res):
        r = simplejson.loads(res)
        self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
                        r["summary"])
        self.failIf(r["results"]["healthy"])
        self.failUnless(r["results"]["recoverable"])
        self.failUnlessEqual(r["results"]["count-shares-good"], 9)
        self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
    d.addCallback(_got_json_corrupt)

    d.addErrback(self.explain_web_error)
    return d
def test_repair_html(self):
    """Exercise t=check&repair=true (html output) against healthy, sick,
    and corrupt files."""
    self.basedir = "web/Grid/repair_html"
    self.set_up_grid()   # NOTE(review): reconstructed missing setup line
    c0 = self.g.clients[0]
    self.uris = {}       # NOTE(review): reconstructed missing line
    DATA = "data" * 100  # NOTE(review): reconstructed test payload -- confirm
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")

    def _compute_fileurls(ignored):
        self.fileurls = {}  # NOTE(review): reconstructed missing line
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        corrupt_share(cso)  # NOTE(review): reconstructed missing line
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check&repair=true")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("No repair necessary" in res, res)
    d.addCallback(_got_html_good)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true")
    def _got_html_sick(res):
        self.failUnless("Healthy : healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_sick)

    # repair of a dead file will fail, of course, but it isn't yet
    # clear how this should be reported. Right now it shows up as
    # an error (the dead-file case below stays disabled).
    #
    #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
    #def _got_html_dead(res):
    #    self.failUnless("Healthy : healthy" in res, res)
    #    self.failIf("Not Healthy" in res, res)
    #    self.failUnless("No repair necessary" in res, res)
    #d.addCallback(_got_html_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
    def _got_html_corrupt(res):
        self.failUnless("Healthy : Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_corrupt)

    d.addErrback(self.explain_web_error)
    return d
def test_repair_json(self):
    """t=check&repair=true&output=json must report the pre- and
    post-repair results for a sick file."""
    self.basedir = "web/Grid/repair_json"
    self.set_up_grid()   # NOTE(review): reconstructed missing setup line
    c0 = self.g.clients[0]
    self.uris = {}       # NOTE(review): reconstructed missing line
    DATA = "data" * 100  # NOTE(review): reconstructed test payload -- confirm
    d = c0.upload(upload.Data(DATA+"1", convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "sick")

    def _compute_fileurls(ignored):
        self.fileurls = {}  # NOTE(review): reconstructed missing line
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["repair-attempted"], True)
        self.failUnlessEqual(r["repair-successful"], True)
        self.failUnlessEqual(r["pre-repair-results"]["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["pre-repair-results"]["results"]["healthy"])
        self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
        self.failUnless(r["post-repair-results"]["results"]["healthy"])
    d.addCallback(_got_json_sick)

    d.addErrback(self.explain_web_error)
    return d
2755 def test_deep_check(self):
# Exercise t=stream-deep-check over a small tree: a healthy CHK file
# ("good"), a literal file ("small"), and a file with two shares deleted
# ("sick"); verify the streamed JSON units, then add an unrecoverable
# subdirectory and confirm the stream switches to "ERROR:" output.
# NOTE(review): this listing has gaps in its embedded line numbering, so
# several statements below are truncated — verify against the full file.
2756 self.basedir = "web/Grid/deep_check"
2758 c0 = self.g.clients[0]
2762 d = c0.create_empty_dirnode()
2763 def _stash_root_and_create_file(n):
2765 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2766 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2767 d.addCallback(_stash_root_and_create_file)
2768 def _stash_uri(fn, which):
2769 self.uris[which] = fn.get_uri()
2771 d.addCallback(_stash_uri, "good")
2772 d.addCallback(lambda ign:
2773 self.rootnode.add_file(u"small",
2774 upload.Data("literal",
2776 d.addCallback(_stash_uri, "small")
2777 d.addCallback(lambda ign:
2778 self.rootnode.add_file(u"sick",
2779 upload.Data(DATA+"1",
2781 d.addCallback(_stash_uri, "sick")
# Damage "sick" (delete two of its shares) so the deep-check reports it
# unhealthy but still recoverable.
2783 def _clobber_shares(ignored):
2784 self.delete_shares_numbered(self.uris["sick"], [0,1])
2785 d.addCallback(_clobber_shares)
2792 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# Each line of the streamed response is an independent JSON "unit":
# one per node (root, good, small, sick) plus a trailing stats unit.
2794 units = [simplejson.loads(line)
2795 for line in res.splitlines()
2797 self.failUnlessEqual(len(units), 4+1)
2798 # should be parent-first
2800 self.failUnlessEqual(u0["path"], [])
2801 self.failUnlessEqual(u0["type"], "directory")
2802 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2803 u0cr = u0["check-results"]
2804 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
2806 ugood = [u for u in units
2807 if u["type"] == "file" and u["path"] == [u"good"]][0]
2808 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2809 ugoodcr = ugood["check-results"]
2810 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
2813 self.failUnlessEqual(stats["type"], "stats")
2815 self.failUnlessEqual(s["count-immutable-files"], 2)
2816 self.failUnlessEqual(s["count-literal-files"], 1)
2817 self.failUnlessEqual(s["count-directories"], 1)
2818 d.addCallback(_done)
2820 # now add root/subdir and root/subdir/grandchild, then make subdir
2821 # unrecoverable, then see what happens
2823 d.addCallback(lambda ign:
2824 self.rootnode.create_empty_directory(u"subdir"))
2825 d.addCallback(_stash_uri, "subdir")
2826 d.addCallback(lambda subdir_node:
2827 subdir_node.add_file(u"grandchild",
2828 upload.Data(DATA+"2",
2830 d.addCallback(_stash_uri, "grandchild")
2832 d.addCallback(lambda ign:
2833 self.delete_shares_numbered(self.uris["subdir"],
2840 # root/subdir [unrecoverable]
2841 # root/subdir/grandchild
2843 # how should a streaming-JSON API indicate fatal error?
2844 # answer: emit ERROR: instead of a JSON string
2846 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2847 def _check_broken_manifest(res):
2848 lines = res.splitlines()
2850 for (i,line) in enumerate(lines)
2851 if line.startswith("ERROR:")]
2853 self.fail("no ERROR: in output: %s" % (res,))
2854 first_error = error_lines[0]
2855 error_line = lines[first_error]
2856 error_msg = lines[first_error+1:]
2857 error_msg_s = "\n".join(error_msg) + "\n"
2858 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2860 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
# Everything before the ERROR: line must still be valid JSON units.
2861 units = [simplejson.loads(line) for line in lines[:first_error]]
2862 self.failUnlessEqual(len(units), 5) # includes subdir
2863 last_unit = units[-1]
2864 self.failUnlessEqual(last_unit["path"], ["subdir"])
2865 d.addCallback(_check_broken_manifest)
# Same breakage, but through t=stream-deep-check instead of the manifest.
2867 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
2868 def _check_broken_deepcheck(res):
2869 lines = res.splitlines()
2871 for (i,line) in enumerate(lines)
2872 if line.startswith("ERROR:")]
2874 self.fail("no ERROR: in output: %s" % (res,))
2875 first_error = error_lines[0]
2876 error_line = lines[first_error]
2877 error_msg = lines[first_error+1:]
2878 error_msg_s = "\n".join(error_msg) + "\n"
2879 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2881 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2882 units = [simplejson.loads(line) for line in lines[:first_error]]
2883 self.failUnlessEqual(len(units), 5) # includes subdir
2884 last_unit = units[-1]
2885 self.failUnlessEqual(last_unit["path"], ["subdir"])
2886 r = last_unit["check-results"]["results"]
2887 self.failUnlessEqual(r["count-recoverable-versions"], 0)
2888 self.failUnlessEqual(r["count-shares-good"], 1)
2889 self.failUnlessEqual(r["recoverable"], False)
2890 d.addCallback(_check_broken_deepcheck)
2892 d.addErrback(self.explain_web_error)
2895 def test_deep_check_and_repair(self):
2896 self.basedir = "web/Grid/deep_check_and_repair"
2898 c0 = self.g.clients[0]
2902 d = c0.create_empty_dirnode()
2903 def _stash_root_and_create_file(n):
2905 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2906 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2907 d.addCallback(_stash_root_and_create_file)
2908 def _stash_uri(fn, which):
2909 self.uris[which] = fn.get_uri()
2910 d.addCallback(_stash_uri, "good")
2911 d.addCallback(lambda ign:
2912 self.rootnode.add_file(u"small",
2913 upload.Data("literal",
2915 d.addCallback(_stash_uri, "small")
2916 d.addCallback(lambda ign:
2917 self.rootnode.add_file(u"sick",
2918 upload.Data(DATA+"1",
2920 d.addCallback(_stash_uri, "sick")
2921 #d.addCallback(lambda ign:
2922 # self.rootnode.add_file(u"dead",
2923 # upload.Data(DATA+"2",
2925 #d.addCallback(_stash_uri, "dead")
2927 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2928 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2929 #d.addCallback(_stash_uri, "corrupt")
2931 def _clobber_shares(ignored):
2932 good_shares = self.find_shares(self.uris["good"])
2933 self.failUnlessEqual(len(good_shares), 10)
2934 sick_shares = self.find_shares(self.uris["sick"])
2935 os.unlink(sick_shares[0][2])
2936 #dead_shares = self.find_shares(self.uris["dead"])
2937 #for i in range(1, 10):
2938 # os.unlink(dead_shares[i][2])
2940 #c_shares = self.find_shares(self.uris["corrupt"])
2941 #cso = CorruptShareOptions()
2942 #cso.stdout = StringIO()
2943 #cso.parseOptions([c_shares[0][2]])
2945 d.addCallback(_clobber_shares)
2948 # root/good CHK, 10 shares
2950 # root/sick CHK, 9 shares
2952 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
2954 units = [simplejson.loads(line)
2955 for line in res.splitlines()
2957 self.failUnlessEqual(len(units), 4+1)
2958 # should be parent-first
2960 self.failUnlessEqual(u0["path"], [])
2961 self.failUnlessEqual(u0["type"], "directory")
2962 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2963 u0crr = u0["check-and-repair-results"]
2964 self.failUnlessEqual(u0crr["repair-attempted"], False)
2965 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
2967 ugood = [u for u in units
2968 if u["type"] == "file" and u["path"] == [u"good"]][0]
2969 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2970 ugoodcrr = ugood["check-and-repair-results"]
2971 self.failUnlessEqual(u0crr["repair-attempted"], False)
2972 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
2974 usick = [u for u in units
2975 if u["type"] == "file" and u["path"] == [u"sick"]][0]
2976 self.failUnlessEqual(usick["cap"], self.uris["sick"])
2977 usickcrr = usick["check-and-repair-results"]
2978 self.failUnlessEqual(usickcrr["repair-attempted"], True)
2979 self.failUnlessEqual(usickcrr["repair-successful"], True)
2980 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
2981 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
2984 self.failUnlessEqual(stats["type"], "stats")
2986 self.failUnlessEqual(s["count-immutable-files"], 2)
2987 self.failUnlessEqual(s["count-literal-files"], 1)
2988 self.failUnlessEqual(s["count-directories"], 1)
2989 d.addCallback(_done)
2991 d.addErrback(self.explain_web_error)
2994 def _count_leases(self, ignored, which):
2995 u = self.uris[which]
2996 shares = self.find_shares(u)
2998 for shnum, serverid, fn in shares:
2999 sf = get_share_file(fn)
3000 num_leases = len(list(sf.get_leases()))
3001 lease_counts.append( (fn, num_leases) )
3004 def _assert_leasecount(self, lease_counts, expected):
3005 for (fn, num_leases) in lease_counts:
3006 if num_leases != expected:
3007 self.fail("expected %d leases, have %d, on %s" %
3008 (expected, num_leases, fn))
3010 def test_add_lease(self):
# Verify t=check&add-lease=true semantics: a re-check from the same
# client merely renews the existing lease (count stays 1), while a check
# from a second client (clientnum=1) adds a second lease; also covers a
# mutable file. Lease counts are read via _count_leases/_assert_leasecount.
# NOTE(review): this listing has gaps in its embedded line numbering, so
# several statements below are truncated — verify against the full file.
3011 self.basedir = "web/Grid/add_lease"
3012 self.set_up_grid(num_clients=2)
3013 c0 = self.g.clients[0]
3016 d = c0.upload(upload.Data(DATA, convergence=""))
3017 def _stash_uri(ur, which):
3018 self.uris[which] = ur.uri
3019 d.addCallback(_stash_uri, "one")
3020 d.addCallback(lambda ign:
3021 c0.upload(upload.Data(DATA+"1", convergence="")))
3022 d.addCallback(_stash_uri, "two")
3023 def _stash_mutable_uri(n, which):
3024 self.uris[which] = n.get_uri()
3025 assert isinstance(self.uris[which], str)
3026 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3027 d.addCallback(_stash_mutable_uri, "mutable")
3029 def _compute_fileurls(ignored):
3031 for which in self.uris:
3032 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3033 d.addCallback(_compute_fileurls)
# Baseline: each object starts with exactly one lease.
3035 d.addCallback(self._count_leases, "one")
3036 d.addCallback(self._assert_leasecount, 1)
3037 d.addCallback(self._count_leases, "two")
3038 d.addCallback(self._assert_leasecount, 1)
3039 d.addCallback(self._count_leases, "mutable")
3040 d.addCallback(self._assert_leasecount, 1)
3042 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3043 def _got_html_good(res):
3044 self.failUnless("Healthy" in res, res)
3045 self.failIf("Not Healthy" in res, res)
3046 d.addCallback(_got_html_good)
# A plain t=check must not change any lease counts.
3048 d.addCallback(self._count_leases, "one")
3049 d.addCallback(self._assert_leasecount, 1)
3050 d.addCallback(self._count_leases, "two")
3051 d.addCallback(self._assert_leasecount, 1)
3052 d.addCallback(self._count_leases, "mutable")
3053 d.addCallback(self._assert_leasecount, 1)
3055 # this CHECK uses the original client, which uses the same
3056 # lease-secrets, so it will just renew the original lease
3057 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3058 d.addCallback(_got_html_good)
3060 d.addCallback(self._count_leases, "one")
3061 d.addCallback(self._assert_leasecount, 1)
3062 d.addCallback(self._count_leases, "two")
3063 d.addCallback(self._assert_leasecount, 1)
3064 d.addCallback(self._count_leases, "mutable")
3065 d.addCallback(self._assert_leasecount, 1)
3067 # this CHECK uses an alternate client, which adds a second lease
3068 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3069 d.addCallback(_got_html_good)
3071 d.addCallback(self._count_leases, "one")
3072 d.addCallback(self._assert_leasecount, 2)
3073 d.addCallback(self._count_leases, "two")
3074 d.addCallback(self._assert_leasecount, 1)
3075 d.addCallback(self._count_leases, "mutable")
3076 d.addCallback(self._assert_leasecount, 1)
# Same pattern for the mutable file: same-client check renews...
3078 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3079 d.addCallback(_got_html_good)
3081 d.addCallback(self._count_leases, "one")
3082 d.addCallback(self._assert_leasecount, 2)
3083 d.addCallback(self._count_leases, "two")
3084 d.addCallback(self._assert_leasecount, 1)
3085 d.addCallback(self._count_leases, "mutable")
3086 d.addCallback(self._assert_leasecount, 1)
# ...while a check from the other client adds a second lease (the
# clientnum argument on the next call appears elided in this listing).
3088 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3090 d.addCallback(_got_html_good)
3092 d.addCallback(self._count_leases, "one")
3093 d.addCallback(self._assert_leasecount, 2)
3094 d.addCallback(self._count_leases, "two")
3095 d.addCallback(self._assert_leasecount, 1)
3096 d.addCallback(self._count_leases, "mutable")
3097 d.addCallback(self._assert_leasecount, 2)
3099 d.addErrback(self.explain_web_error)
3102 def test_deep_add_lease(self):
# Like test_add_lease, but via t=stream-deep-check&add-lease=true on a
# directory tree (root, one, small, mutable): same-client deep-check
# renews leases (counts stay 1); a second client's deep-check (the
# clientnum argument appears elided below) adds a second lease on each
# non-literal object.
# NOTE(review): this listing has gaps in its embedded line numbering, so
# several statements below are truncated — verify against the full file.
3103 self.basedir = "web/Grid/deep_add_lease"
3104 self.set_up_grid(num_clients=2)
3105 c0 = self.g.clients[0]
3109 d = c0.create_empty_dirnode()
3110 def _stash_root_and_create_file(n):
3112 self.uris["root"] = n.get_uri()
3113 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3114 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3115 d.addCallback(_stash_root_and_create_file)
3116 def _stash_uri(fn, which):
3117 self.uris[which] = fn.get_uri()
3118 d.addCallback(_stash_uri, "one")
3119 d.addCallback(lambda ign:
3120 self.rootnode.add_file(u"small",
3121 upload.Data("literal",
3123 d.addCallback(_stash_uri, "small")
3125 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3126 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3127 d.addCallback(_stash_uri, "mutable")
3129 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
3131 units = [simplejson.loads(line)
3132 for line in res.splitlines()
3134 # root, one, small, mutable, stats
3135 self.failUnlessEqual(len(units), 4+1)
3136 d.addCallback(_done)
# Baseline: one lease everywhere after the no-add-lease deep-check.
3138 d.addCallback(self._count_leases, "root")
3139 d.addCallback(self._assert_leasecount, 1)
3140 d.addCallback(self._count_leases, "one")
3141 d.addCallback(self._assert_leasecount, 1)
3142 d.addCallback(self._count_leases, "mutable")
3143 d.addCallback(self._assert_leasecount, 1)
# Same client with add-lease=true: only renews, counts unchanged.
3145 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3146 d.addCallback(_done)
3148 d.addCallback(self._count_leases, "root")
3149 d.addCallback(self._assert_leasecount, 1)
3150 d.addCallback(self._count_leases, "one")
3151 d.addCallback(self._assert_leasecount, 1)
3152 d.addCallback(self._count_leases, "mutable")
3153 d.addCallback(self._assert_leasecount, 1)
# Other client (argument elided in listing): adds a second lease each.
3155 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3157 d.addCallback(_done)
3159 d.addCallback(self._count_leases, "root")
3160 d.addCallback(self._assert_leasecount, 2)
3161 d.addCallback(self._count_leases, "one")
3162 d.addCallback(self._assert_leasecount, 2)
3163 d.addCallback(self._count_leases, "mutable")
3164 d.addCallback(self._assert_leasecount, 2)
3166 d.addErrback(self.explain_web_error)
3170 def test_exceptions(self):
# Verify the webapi's error reporting: NotEnoughSharesError (0-share and
# 1-share files) -> 410 Gone with an exact text/plain explanation;
# missing directory child -> 404; unrecoverable directories -> HTML page
# with advice (or 410 for ?t=json); and an arbitrary server-side
# exception (ErrorBoom) -> 500 with an HTML or plain-text traceback
# depending on the Accept header.
# NOTE(review): this listing has gaps in its embedded line numbering, so
# several statements below are truncated — verify against the full file.
3171 self.basedir = "web/Grid/exceptions"
3172 self.set_up_grid(num_clients=1, num_servers=2)
3173 c0 = self.g.clients[0]
3176 d = c0.create_empty_dirnode()
3178 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3179 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3181 d.addCallback(_stash_root)
3182 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# Build a 1-share file (delete shares 1-9) and a 0-share URI (flip a key
# bit so the cap points at shares that don't exist).
3184 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3185 self.delete_shares_numbered(ur.uri, range(1,10))
3187 u = uri.from_string(ur.uri)
3188 u.key = testutil.flip_bit(u.key, 0)
3189 baduri = u.to_string()
3190 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3191 d.addCallback(_stash_bad)
3192 d.addCallback(lambda ign: c0.create_empty_dirnode())
3193 def _mangle_dirnode_1share(n):
3195 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3196 self.fileurls["dir-1share-json"] = url + "?t=json"
3197 self.delete_shares_numbered(u, range(1,10))
3198 d.addCallback(_mangle_dirnode_1share)
3199 d.addCallback(lambda ign: c0.create_empty_dirnode())
3200 def _mangle_dirnode_0share(n):
3202 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3203 self.fileurls["dir-0share-json"] = url + "?t=json"
3204 self.delete_shares_numbered(u, range(0,10))
3205 d.addCallback(_mangle_dirnode_0share)
3207 # NotEnoughSharesError should be reported sensibly, with a
3208 # text/plain explanation of the problem, and perhaps some
3209 # information on which shares *could* be found.
3211 d.addCallback(lambda ignored:
3212 self.shouldHTTPError("GET unrecoverable",
3213 410, "Gone", "NotEnoughSharesError",
3214 self.GET, self.fileurls["0shares"]))
3215 def _check_zero_shares(body):
3216 self.failIf("<html>" in body, body)
3217 body = " ".join(body.strip().split())
3218 exp = ("NotEnoughSharesError: no shares could be found. "
3219 "Zero shares usually indicates a corrupt URI, or that "
3220 "no servers were connected, but it might also indicate "
3221 "severe corruption. You should perform a filecheck on "
3222 "this object to learn more.")
3223 self.failUnlessEqual(exp, body)
3224 d.addCallback(_check_zero_shares)
3227 d.addCallback(lambda ignored:
3228 self.shouldHTTPError("GET 1share",
3229 410, "Gone", "NotEnoughSharesError",
3230 self.GET, self.fileurls["1share"]))
3231 def _check_one_share(body):
3232 self.failIf("<html>" in body, body)
3233 body = " ".join(body.strip().split())
3234 exp = ("NotEnoughSharesError: 1 share found, but we need "
3235 "3 to recover the file. This indicates that some "
3236 "servers were unavailable, or that shares have been "
3237 "lost to server departure, hard drive failure, or disk "
3238 "corruption. You should perform a filecheck on "
3239 "this object to learn more.")
3240 self.failUnlessEqual(exp, body)
3241 d.addCallback(_check_one_share)
3243 d.addCallback(lambda ignored:
3244 self.shouldHTTPError("GET imaginary",
3245 404, "Not Found", None,
3246 self.GET, self.fileurls["imaginary"]))
3247 def _missing_child(body):
3248 self.failUnless("No such child: imaginary" in body, body)
3249 d.addCallback(_missing_child)
3251 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3252 def _check_0shares_dir_html(body):
3253 self.failUnless("<html>" in body, body)
3254 # we should see the regular page, but without the child table or
3256 body = " ".join(body.strip().split())
3257 self.failUnlessIn('href="?t=info">More info on this directory',
3259 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3260 "could not be retrieved, because there were insufficient "
3261 "good shares. This might indicate that no servers were "
3262 "connected, insufficient servers were connected, the URI "
3263 "was corrupt, or that shares have been lost due to server "
3264 "departure, hard drive failure, or disk corruption. You "
3265 "should perform a filecheck on this object to learn more.")
3266 self.failUnlessIn(exp, body)
3267 self.failUnlessIn("No upload forms: directory is unreadable", body)
3268 d.addCallback(_check_0shares_dir_html)
3270 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3271 def _check_1shares_dir_html(body):
3272 # at some point, we'll split UnrecoverableFileError into 0-shares
3273 # and some-shares like we did for immutable files (since there
3274 # are different sorts of advice to offer in each case). For now,
3275 # they present the same way.
3276 self.failUnless("<html>" in body, body)
3277 body = " ".join(body.strip().split())
3278 self.failUnlessIn('href="?t=info">More info on this directory',
3280 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3281 "could not be retrieved, because there were insufficient "
3282 "good shares. This might indicate that no servers were "
3283 "connected, insufficient servers were connected, the URI "
3284 "was corrupt, or that shares have been lost due to server "
3285 "departure, hard drive failure, or disk corruption. You "
3286 "should perform a filecheck on this object to learn more.")
3287 self.failUnlessIn(exp, body)
3288 self.failUnlessIn("No upload forms: directory is unreadable", body)
3289 d.addCallback(_check_1shares_dir_html)
# The JSON view of an unrecoverable directory gets 410, not an HTML page.
3291 d.addCallback(lambda ignored:
3292 self.shouldHTTPError("GET dir-0share-json",
3293 410, "Gone", "UnrecoverableFileError",
3295 self.fileurls["dir-0share-json"]))
3296 def _check_unrecoverable_file(body):
3297 self.failIf("<html>" in body, body)
3298 body = " ".join(body.strip().split())
3299 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3300 "could not be retrieved, because there were insufficient "
3301 "good shares. This might indicate that no servers were "
3302 "connected, insufficient servers were connected, the URI "
3303 "was corrupt, or that shares have been lost due to server "
3304 "departure, hard drive failure, or disk corruption. You "
3305 "should perform a filecheck on this object to learn more.")
3306 self.failUnlessEqual(exp, body)
3307 d.addCallback(_check_unrecoverable_file)
3309 d.addCallback(lambda ignored:
3310 self.shouldHTTPError("GET dir-1share-json",
3311 410, "Gone", "UnrecoverableFileError",
3313 self.fileurls["dir-1share-json"]))
3314 d.addCallback(_check_unrecoverable_file)
3316 d.addCallback(lambda ignored:
3317 self.shouldHTTPError("GET imaginary",
3318 404, "Not Found", None,
3319 self.GET, self.fileurls["imaginary"]))
3321 # attach a webapi child that throws a random error, to test how it
3323 w = c0.getServiceNamed("webish")
3324 w.root.putChild("ERRORBOOM", ErrorBoom())
3326 d.addCallback(lambda ignored:
3327 self.shouldHTTPError("GET errorboom_html",
3328 500, "Internal Server Error", None,
3329 self.GET, "ERRORBOOM"))
3330 def _internal_error_html(body):
3331 # test that a weird exception during a webapi operation with
3332 # Accept:*/* results in a text/html stack trace, while one
3333 # without that Accept: line gets us a text/plain stack trace
3334 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3335 d.addCallback(_internal_error_html)
3337 d.addCallback(lambda ignored:
3338 self.shouldHTTPError("GET errorboom_text",
3339 500, "Internal Server Error", None,
3340 self.GET, "ERRORBOOM",
3341 headers={"accept": ["text/plain"]}))
3342 def _internal_error_text(body):
3343 # test that a weird exception during a webapi operation with
3344 # Accept:*/* results in a text/html stack trace, while one
3345 # without that Accept: line gets us a text/plain stack trace
3346 self.failIf("<html>" in body, body)
3347 self.failUnless(body.startswith("Traceback "), body)
3348 d.addCallback(_internal_error_text)
3350 def _flush_errors(res):
3351 # Trial: please ignore the CompletelyUnhandledError in the logs
3352 self.flushLoggedErrors(CompletelyUnhandledError)
3354 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Marker exception deliberately raised by ErrorBoom.

    test_exceptions mounts ErrorBoom on the webapi to exercise the
    internal-error (HTTP 500) paths, then tells trial to ignore the
    logged failure via flushLoggedErrors(CompletelyUnhandledError).
    (Restores the class body, which was elided from this listing.)
    """
3360 class ErrorBoom(rend.Page):
# Minimal web resource that always explodes: beforeRender raises
# CompletelyUnhandledError, so any GET of this page produces an internal
# server error. test_exceptions mounts it at "ERRORBOOM" to verify the
# webapi's 500-handling (HTML vs text/plain tracebacks).
3361 def beforeRender(self, ctx):
3362 raise CompletelyUnhandledError("whoops")