1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.web import status, common
15 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
16 from allmydata.util import fileutil, base32
17 from allmydata.util.assertutil import precondition
18 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
19 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
20 from allmydata.interfaces import IURI, INewDirectoryURI, \
21 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
22 from allmydata.mutable import servermap, publish, retrieve
23 import common_util as testutil
24 from allmydata.test.no_network import GridTestMixin
26 from allmydata.test.common_web import HTTPClientGETFactory, \
29 # create a fake uploader/downloader, and a couple of fake dirnodes, then
30 # create a webserver that works against them
# NOTE(review): a module-level 'timeout' is presumably picked up by
# twisted.trial as the per-test timeout — confirm against trial docs.
timeout = 240 # Most of these take longer than 120 seconds on Francois's arm box.
# Stub introducer client: the web pages only call these enumeration
# methods, so no real Foolscap/introducer machinery is needed.
# NOTE(review): the method bodies are not visible in this view.
class FakeIntroducerClient:
    def get_all_connectors(self):
    def get_all_connections_for(self, service_name):
    def get_all_peerids(self):
# Stub stats provider handing back a fixed, empty stats dictionary so the
# status/welcome pages have something to render.
class FakeStatsProvider:
        stats = {'stats': {}, 'counters': {}}
class FakeClient(service.MultiService):
    # In-process stand-in for the allmydata client node: supplies just the
    # attributes and methods the web frontend consults.
    nodeid = "fake_nodeid"
    nickname = "fake_nickname"
    basedir = "fake_basedir"
    def get_versions(self):
        return {'allmydata': "fake",
    introducer_furl = "None"
    introducer_client = FakeIntroducerClient()
    # one pre-made status object per category, so the /status pages have
    # something to list
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    convergence = "some random string"
    stats_provider = FakeStatsProvider()
    def connected_to_introducer(self):
    storage_broker = StorageFarmBroker()
    def get_storage_broker(self):
        return self.storage_broker
74 def create_node_from_uri(self, auri):
75 precondition(isinstance(auri, str), auri)
76 u = uri.from_string(auri)
77 if (INewDirectoryURI.providedBy(u)
78 or IReadonlyNewDirectoryURI.providedBy(u)):
79 return FakeDirectoryNode(self).init_from_uri(u)
80 if IFileURI.providedBy(u):
81 return FakeCHKFileNode(u, self)
82 assert IMutableFileURI.providedBy(u), u
83 return FakeMutableFileNode(self).init_from_uri(u)
    def create_empty_dirnode(self):
        # NOTE(review): the creation call and final 'return d' are not
        # visible here; presumably the Deferred fires with the new node.
        n = FakeDirectoryNode(self)
        d.addCallback(lambda res: n)
    # mirror the real mutable-file size limit so tests can reference it
    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
92 def create_mutable_file(self, contents=""):
93 n = FakeMutableFileNode(self)
94 return n.create(contents)
    def upload(self, uploadable):
        # Fake uploader: read the uploadable's entire body, wrap it in a
        # pre-computed CHK filenode, and record an UploadResults carrying
        # the new node's URI.
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
            data = "".join(datav)
            n = create_chk_filenode(self, data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    # accessors for the canned status lists defined above
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
class WebMixin(object):
    # Shared fixture: starts a FakeClient plus a real WebishServer on an
    # ephemeral port, then populates a small directory tree
    # (public/foo/bar.txt, .../sub/baz.txt, a read-only dir, etc.) for the
    # HTTP tests to poke at.
    # NOTE(review): the 'def setUp(self):' line is not visible in this view.
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        s.setServiceParent(self.s)
        # port "0" means the OS picked one; recover it for building URLs
        self.webish_port = port = s.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port
        l = [ self.s.create_empty_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri())
            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()
            foo.set_uri(u"empty", res[3][1].get_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri)
            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri)
            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file)
            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file)
            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"
            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
196 def makefile(self, number):
197 contents = "contents of file %s\n" % number
198 n = create_chk_filenode(self.s, contents)
199 return contents, n, n.get_uri()
        # (tearDown fragment: stop the fake client service)
        return self.s.stopService()

    def failUnlessIsBarDotTxt(self, res):
        # assert the response body is exactly bar.txt's fixture contents
        self.failUnlessEqual(res, self.BAR_CONTENTS, res)
207 def failUnlessIsBarJSON(self, res):
208 data = simplejson.loads(res)
209 self.failUnless(isinstance(data, list))
210 self.failUnlessEqual(data[0], u"filenode")
211 self.failUnless(isinstance(data[1], dict))
212 self.failIf(data[1]["mutable"])
213 self.failIf("rw_uri" in data[1]) # immutable
214 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
215 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
216 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # Assert *res* is the t=json rendering of the mutable 'foo'
        # directory, including its expected children and their metadata.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
    def GET(self, urlpath, followRedirect=False, return_response=False,
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred

    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
278 def PUT(self, urlpath, data, **kwargs):
279 url = self.webish_url + urlpath
280 return client.getPage(url, method="PUT", postdata=data, **kwargs)
282 def DELETE(self, urlpath):
283 url = self.webish_url + urlpath
284 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Build and submit a multipart/form-data POST from **fields. A tuple
        # value is treated as (filename, file-contents); unicode values are
        # encoded as UTF-8.
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
                form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
                assert isinstance(value, str)
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # addBoth callback: pass only if *res* is the expected Failure,
        # optionally containing the given substrings; otherwise flunk.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
                self.failUnless(substring in str(res),
                                "substring '%s' not in '%s'"
                                % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def shouldFail2(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
        # Like shouldFail, but invokes *callable* itself (via maybeDeferred)
        # and inspects the resulting failure.
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                    response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def should404(self, res, which):
        # addBoth callback: assert *res* is an error.Error with HTTP 404.
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
            self.fail("%s was supposed to Error(404), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    def test_create(self):
    def test_welcome(self):
            # the welcome page should mention both product and project names
            self.failUnless('Welcome To AllMyData' in res)
            self.failUnless('Tahoe' in res)
        self.s.basedir = 'web/test_welcome'
        fileutil.make_dirs("web/test_welcome")
        fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)

    def test_provisioning(self):
        # render the provisioning form, then POST a few parameter sets and
        # check the computed share-space figures
        d = self.GET("/provisioning/")
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)
            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)

    def test_reliability_tool(self):
        # requires NumPy; raises SkipTest when it cannot be imported
        from allmydata import reliability
        _hush_pyflakes = reliability
            raise unittest.SkipTest("reliability tool requires NumPy")
        d = self.GET("/reliability/")
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      "check_period": "1M",
                      "report_period": "3M",
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
    def test_status(self):
        # each status list holds one pre-made object (see FakeClient); fetch
        # the /status summary plus every per-operation page
        dl_num = self.s.list_all_download_statuses()[0].get_counter()
        ul_num = self.s.list_all_upload_statuses()[0].get_counter()
        mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = self.s.list_all_publish_statuses()[0].get_counter()
        ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)
        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
511 def test_status_numbers(self):
512 drrm = status.DownloadResultsRendererMixin()
513 self.failUnlessEqual(drrm.render_time(None, None), "")
514 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
515 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
516 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
517 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
518 self.failUnlessEqual(drrm.render_rate(None, None), "")
519 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
520 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
521 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
523 urrm = status.UploadResultsRendererMixin()
524 self.failUnlessEqual(urrm.render_time(None, None), "")
525 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
526 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
527 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
528 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
529 self.failUnlessEqual(urrm.render_rate(None, None), "")
530 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
531 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
532 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_range(self):
        # a satisfiable Range request should yield 206 plus a Content-Range
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])

    def test_GET_FILEURL_partial_range(self):
        # open-ended range "bytes=5-" runs to the end of the file
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])

    def test_HEAD_FILEURL_range(self):
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            # HEAD reports the range headers but carries no body
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))

    def test_GET_FILEURL_range_bad(self):
        # a malformed Range header must be rejected with an HTTP error
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",

    def test_HEAD_FILEURL(self):
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            # no body, but content-length/content-type must be present
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
    def test_GET_FILEURL_named(self):
        # /file/$CAP/@@name=/x and /named/$CAP/@@name=/x both serve the file
        # contents regardless of the terminal name used
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers

    def test_PUT_FILEURL_named_bad(self):
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")

    def test_GET_DIRURL_named_bad(self):
        # /file with a directory-cap should be rejected
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             self.GET, base + "/@@name=/blah.txt")

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             "/file must be followed by a file-cap and a name",

    def test_GET_unhandled_URI_named(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "is not a valid file- or directory- cap",

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             "is not a valid file- or directory- cap",

    def test_GET_FILE_URI(self):
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILE_URI_badchild(self):
        # asking for a child of a file node must fail with 400
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,

    def test_GET_FILEURL_save(self):
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
    def test_PUT_NEWFILEURL(self):
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_not_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_range_bad(self):
        # Content-Range on PUT is unsupported: expect 501, and the child
        # must not have been created
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))

    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
            # the response body parses as a writeable mutable-file cap
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        # one byte over MUTABLE_SIZELIMIT should produce 413
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))

    def test_PUT_NEWFILEURL_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",

    def test_PUT_NEWFILEURL_no_replace(self):
        # replace=false on an existing child must fail
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  "There was already a child by that name, and you asked me "

    def test_PUT_NEWFILEURL_mkdirs(self):
        # PUT to a multi-level path should create intermediate directories
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_blocked(self):
        # a file at an intermediate path component blocks directory creation
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  "Unable to create directory 'blockingfile': a file was in the way")
    def test_DELETE_FILEURL(self):
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")

    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")

    def test_GET_FILEURL_uri(self):
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 self.public_url + "/foo/bar.txt?t=bogus")

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
    def test_GET_DIRURL(self):
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        d = self.GET(self.public_url + "/foo", followRedirect=True)
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                self.fail("unable to find delete-bar.txt line", res)
            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td>DIR</td>')
                       +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)

        # look at a directory which is readonly
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
            self.failUnless("(read-only)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)

        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
            self.failUnless(re.search('<td>DIR-RO</td>'
                                      r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)

        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
            self.failUnless("directory is empty" in res, res)
            MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
            self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)

    def test_GET_DIRURL_badtype(self):
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 self.public_url + "/foo?t=bogus")

    def test_GET_DIRURL_json(self):
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
953 def test_POST_DIRURL_manifest_no_ophandle(self):
954 d = self.shouldFail2(error.Error,
955 "test_POST_DIRURL_manifest_no_ophandle",
957 "slow operation requires ophandle=",
958 self.POST, self.public_url, t="start-manifest")
def test_POST_DIRURL_manifest(self):
    # Start a t=start-manifest operation and check its HTML, text, and
    # JSON renderings via /operations/125.
    # NOTE(review): several lines appear to be missing in this method:
    # the closing arguments of self.POST in getman, "return d" inside
    # getman, and the "def _got_json(res):" header (plus "got = {}")
    # before the JSON checks near the end -- confirm against history.
    d = defer.succeed(None)
    def getman(ignored, output):
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
        d.addCallback(self.wait_for_operation, "125")
        d.addCallback(self.get_operation_results, "125", output)
    d.addCallback(getman, None)
    def _got_html(manifest):
        # HTML form should mention each child of the directory.
        self.failUnless("Manifest of SI=" in manifest)
        self.failUnless("<td>sub</td>" in manifest)
        self.failUnless(self._sub_uri in manifest)
        self.failUnless("<td>sub/baz.txt</td>" in manifest)
    d.addCallback(_got_html)
    # both t=status and unadorned GET should be identical
    d.addCallback(lambda res: self.GET("/operations/125"))
    d.addCallback(_got_html)
    d.addCallback(getman, "html")
    d.addCallback(_got_html)
    d.addCallback(getman, "text")
    def _got_text(manifest):
        self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
        self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
    d.addCallback(_got_text)
    d.addCallback(getman, "JSON")
    data = res["manifest"]
    for (path_list, cap) in data:
        got[tuple(path_list)] = cap
    self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
    self.failUnless((u"sub",u"baz.txt") in got)
    self.failUnless("finished" in res)
    self.failUnless("origin" in res)
    self.failUnless("storage-index" in res)
    self.failUnless("verifycaps" in res)
    self.failUnless("stats" in res)
    d.addCallback(_got_json)
def test_POST_DIRURL_deepsize_no_ophandle(self):
    # t=start-deep-size is a slow operation and must be rejected when no
    # ophandle= is supplied.
    # NOTE(review): a "400 Bad Request" argument may be missing from the
    # shouldFail2 call, as in sibling tests.
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepsize_no_ophandle",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-size")
def test_POST_DIRURL_deepsize(self):
    # Run a deep-size operation and check both the JSON and text output.
    # NOTE(review): "size = data[...]" in _got_json and the
    # "def _got_text(res):" header before the re.search block appear to
    # be missing -- confirm against history.
    d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "126")
    d.addCallback(self.get_operation_results, "126", "json")
    def _got_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnless(size > 1000)
    d.addCallback(_got_json)
    d.addCallback(self.get_operation_results, "126", "text")
        mo = re.search(r'^size: (\d+)$', res, re.M)
        self.failUnless(mo, res)
        size = int(mo.group(1))
        # with directories, the size varies.
        self.failUnless(size > 1000)
    d.addCallback(_got_text)
def test_POST_DIRURL_deepstats_no_ophandle(self):
    # t=start-deep-stats is a slow operation and must be rejected when
    # no ophandle= is supplied.
    # NOTE(review): a "400 Bad Request" argument may be missing from the
    # shouldFail2 call, as in sibling tests.
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepstats_no_ophandle",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-stats")
def test_POST_DIRURL_deepstats(self):
    # Run a deep-stats operation and compare selected counters against
    # the known contents of the test directory tree.
    # NOTE(review): the closing "}" of the expected dict, the third
    # argument of the "stats[%s] was %s" format, and the expected
    # histogram literal appear to be missing -- confirm against history.
    d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "127")
    d.addCallback(self.get_operation_results, "127", "json")
    def _got_json(stats):
        expected = {"count-immutable-files": 3,
                    "count-mutable-files": 0,
                    "count-literal-files": 0,
                    "count-directories": 3,
                    "size-immutable-files": 57,
                    "size-literal-files": 0,
                    #"size-directories": 1912, # varies
                    #"largest-directory": 1590,
                    "largest-directory-children": 5,
                    "largest-immutable-file": 19,
        for k,v in expected.iteritems():
            self.failUnlessEqual(stats[k], v,
                                 "stats[%s] was %s, not %s" %
        self.failUnlessEqual(stats["size-files-histogram"],
    d.addCallback(_got_json)
def test_POST_DIRURL_stream_manifest(self):
    # t=stream-manifest returns one JSON unit per line, ending with a
    # "stats" unit; check the root and one deep child.
    # NOTE(review): the "def _check(res):" header and "first = units[0]"
    # appear to be missing -- confirm against history.
    d = self.POST(self.public_url + "/foo/?t=stream-manifest")
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessEqual(len(units), 7)
        self.failUnlessEqual(units[-1]["type"], "stats")
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(first["cap"], self._foo_uri)
        self.failUnlessEqual(first["type"], "directory")
        baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
        self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
        self.failIfEqual(baz["storage-index"], None)
        self.failIfEqual(baz["verifycap"], None)
        self.failIfEqual(baz["repaircap"], None)
    d.addCallback(_check)
def test_GET_DIRURL_uri(self):
    # t=uri on a directory should return the directory's write-cap.
    # NOTE(review): the "def _check(res):" header appears to be missing.
    d = self.GET(self.public_url + "/foo?t=uri")
        self.failUnlessEqual(res, self._foo_uri)
    d.addCallback(_check)
def test_GET_DIRURL_readonly_uri(self):
    # t=readonly-uri on a directory should return the read-only cap.
    # NOTE(review): the "def _check(res):" header appears to be missing.
    d = self.GET(self.public_url + "/foo?t=readonly-uri")
        self.failUnlessEqual(res, self._foo_readonly_uri)
    d.addCallback(_check)
def test_PUT_NEWDIRURL(self):
    """PUT ?t=mkdir should create a new, empty directory under foo/."""
    d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # Return the Deferred so trial waits for the whole callback chain.
    return d
def test_PUT_NEWDIRURL_exists(self):
    """PUT ?t=mkdir on an existing directory leaves its children intact."""
    d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    # 'sub' should still contain its original child, not be emptied.
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    # Return the Deferred so trial waits for the whole callback chain.
    return d
def test_PUT_NEWDIRURL_blocked(self):
    # Creating a directory underneath an existing *file* must fail with
    # 409 Conflict, and must not disturb the existing 'sub' directory.
    # NOTE(review): the callable argument (self.PUT) appears to be
    # missing from the shouldFail2 call -- confirm against history.
    d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                         "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                         self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
def test_PUT_NEWDIRURL_mkdir_p(self):
    # t=mkdir-p&path=... should create intermediate directories and be
    # idempotent: a second call returns the same URI.
    # NOTE(review): lines issuing the actual POST of 'url' (and the
    # repeat POST plus "return d" statements) appear to be missing from
    # mkdir_p/made_subsub -- confirm against history.
    d = defer.succeed(None)
    d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
    d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
    d.addCallback(lambda res: self._foo_node.get(u"mkp"))
    def mkdir_p(mkpnode):
        url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
        def made_subsub(ssuri):
            d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
            d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
            d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
        d.addCallback(made_subsub)
    d.addCallback(mkdir_p)
def test_PUT_NEWDIRURL_mkdirs(self):
    """PUT ?t=mkdir with missing intermediate dirs creates them too."""
    d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
    # 'newdir' must not appear directly under foo/ ...
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"newdir"))
    # ... but the intermediate 'subdir' must, with 'newdir' inside it.
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
    d.addCallback(lambda res:
                  self._foo_node.get_child_at_path(u"subdir/newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # Return the Deferred so trial waits for the whole callback chain.
    return d
def test_DELETE_DIRURL(self):
    """DELETE on a directory URL should unlink it from its parent."""
    d = self.DELETE(self.public_url + "/foo")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    # Return the Deferred so trial waits for the assertion callback.
    return d
def test_DELETE_DIRURL_missing(self):
    """DELETE of a nonexistent child should 404 and leave the parent alone."""
    d = self.DELETE(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"foo"))
    # Return the Deferred so trial waits for the assertion callbacks.
    return d
def test_DELETE_DIRURL_missing2(self):
    """DELETE of a nonexistent top-level directory should 404."""
    d = self.DELETE(self.public_url + "/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    # Return the Deferred so trial waits for the 404 check.
    return d
def dump_root(self):
    # Debug helper: walk the whole public root, visiting every child.
    # NOTE(review): the visitor body (presumably printing each childpath)
    # and a trailing "return d" appear to be missing.
    w = webish.DirnodeWalkerMixin()
    def visitor(childpath, childnode, metadata):
    d = w.walk(self.public_root, visitor)
def failUnlessNodeKeysAre(self, node, expected_keys):
    # Assert that node's children are exactly expected_keys (unicode).
    # NOTE(review): "d = node.list()" and a trailing "return d" appear
    # to be missing -- _check is clearly meant to receive node.list().
    for k in expected_keys:
        assert isinstance(k, unicode)
    def _check(children):
        self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
    d.addCallback(_check)
def failUnlessNodeHasChild(self, node, name):
    # Assert that 'name' appears among node's children.
    # NOTE(review): "d = node.list()" and "return d" appear to be missing.
    assert isinstance(name, unicode)
    def _check(children):
        self.failUnless(name in children)
    d.addCallback(_check)
def failIfNodeHasChild(self, node, name):
    # Assert that 'name' does NOT appear among node's children.
    # NOTE(review): "d = node.list()" and "return d" appear to be missing.
    assert isinstance(name, unicode)
    def _check(children):
        self.failIf(name in children)
    d.addCallback(_check)
def failUnlessChildContentsAre(self, node, name, expected_contents):
    """Assert that the (immutable) child 'name' of node downloads to
    exactly expected_contents. Returns a Deferred."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_to_data())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    # Return the Deferred so callers can chain on the assertion; callers
    # use this helper inside addCallback chains.
    return d
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
    """Assert that the mutable child 'name' of node has the given best
    version contents. Returns a Deferred."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_best_version())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    # Return the Deferred so callers can chain on the assertion.
    return d
def failUnlessChildURIIs(self, node, name, expected_uri):
    # Assert that the child's URI equals expected_uri (stripped).
    # NOTE(review): the "def _check(child):" header and "return d"
    # appear to be missing.
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
        self.failUnlessEqual(child.get_uri(), expected_uri.strip())
    d.addCallback(_check)
def failUnlessURIMatchesChild(self, got_uri, node, name):
    # Assert that a URI returned by the webapi matches the child's URI.
    # NOTE(review): the "def _check(child):" header and "return d"
    # appear to be missing.
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
        self.failUnlessEqual(got_uri.strip(), child.get_uri())
    d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake grid stores 'contents' under the CHK URI."""
    # failUnlessEqual (rather than failUnless on ==) reports both values
    # when the assertion fails, which makes test failures debuggable.
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
def test_POST_upload(self):
    # POST t=upload into foo/ and verify the new child and its contents.
    # NOTE(review): "fn = self._foo_node" and a trailing "return d"
    # appear to be missing -- 'fn' is otherwise unbound here.
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_unicode(self):
    # Upload a file whose name contains a non-ASCII character and fetch
    # it back through its UTF-8-encoded URL.
    # NOTE(review): "fn = self._foo_node", the closing arguments of the
    # final failUnlessEqual, and "return d" appear to be missing.
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=(filename, self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,
def test_POST_upload_unicode_named(self):
    # As test_POST_upload_unicode, but the unicode name comes from the
    # name= form field, overriding the file-part filename.
    # NOTE(review): "fn = self._foo_node", the name= argument of the
    # POST, the closing arguments of the final failUnlessEqual, and
    # "return d" appear to be missing.
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("overridden", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,
def test_POST_upload_no_link(self):
    # POST /uri t=upload performs an unlinked upload; the results page
    # should carry the new URI, which must resolve to the contents.
    # NOTE(review): "return new_uri" at the end of _check_upload_results
    # and a trailing "return d" appear to be missing -- the next
    # callback clearly expects the URI as its argument.
    d = self.POST("/uri", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check_upload_results(page):
        # this should be a page which describes the results of the upload
        # that just finished.
        self.failUnless("Upload Results:" in page)
        self.failUnless("URI:" in page)
        uri_re = re.compile("URI: <tt><span>(.*)</span>")
        mo = uri_re.search(page)
        self.failUnless(mo, page)
        new_uri = mo.group(1)
    d.addCallback(_check_upload_results)
    d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
def test_POST_upload_no_link_whendone(self):
    """An unlinked upload with when_done= should redirect there afterwards."""
    d = self.POST("/uri", t="upload", when_done="/",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/")
    # Return the Deferred so trial waits for the redirect check.
    return d
def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
    # Invoke callable(*args, **kwargs) and require that it raise
    # PageRedirect; hand (statuscode, target) to 'checker' and return
    # whatever checker returns.
    # NOTE(review): the "def _done(res):" header, the remaining
    # arguments of self.fail, "d.addBoth(_done)", and "return d" appear
    # to be missing.
    d = defer.maybeDeferred(callable, *args, **kwargs)
        if isinstance(res, failure.Failure):
            res.trap(error.PageRedirect)
            statuscode = res.value.status
            target = res.value.location
            return checker(statuscode, target)
        self.fail("%s: callable was supposed to redirect, not return '%s'"
def test_POST_upload_no_link_whendone_results(self):
    # when_done="/uri/%(uri)s" should interpolate the new file's URI
    # into the redirect target; fetching that target must yield the
    # uploaded contents.
    # NOTE(review): the 'check' argument of shouldRedirect2 and a
    # trailing "return d" appear to be missing.
    def check(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.startswith(self.webish_url), target)
        return client.getPage(target, method="GET")
    d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                             self.POST, "/uri", t="upload",
                             when_done="/uri/%(uri)s",
                             file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
def test_POST_upload_no_link_mutable(self):
    # Unlinked upload with mutable=true: the response body is the new
    # mutable URI, which must be fetchable via /uri/ and /file/.
    # NOTE(review): "u = IURI(uri.from_string(new_uri))" (or similar),
    # the "def _check2/_check3/_check4(data):" headers, and a trailing
    # "return d" appear to be missing.
    d = self.POST("/uri", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check(new_uri):
        new_uri = new_uri.strip()
        self.new_uri = new_uri
        self.failUnless(IMutableFileURI.providedBy(u))
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(new_uri)
        return n.download_best_version()
    d.addCallback(_check)
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        return self.GET("/uri/%s" % urllib.quote(self.new_uri))
    d.addCallback(_check2)
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        return self.GET("/file/%s" % urllib.quote(self.new_uri))
    d.addCallback(_check3)
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
    d.addCallback(_check4)
def test_POST_upload_no_link_mutable_toobig(self):
    # A mutable upload larger than MUTABLE_SIZELIMIT must be rejected
    # with 413 Request Entity Too Large.
    # NOTE(review): the "self.POST," callable argument and the opening
    # of the file=("new.txt", ...) tuple appear to be missing.
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_no_link_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         "/uri", t="upload", mutable="true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
def test_POST_upload_mutable(self):
    # End-to-end coverage of mutable files through the webapi: create
    # via POST, overwrite via POST and PUT (URI must stay stable),
    # check HTML/JSON directory listings, t=uri/t=readonly-uri, /uri/
    # access, HEAD sizing, and oversize-rewrite rejection.
    # NOTE(review): many lines appear to be missing throughout this
    # method: "fn = self._foo_node" (so 'fn' is unbound), the
    # "def _got/_got2/_got3(newnode):" headers, closing arguments of
    # several calls (NEWER_CONTENTS / NEW2_CONTENTS), the
    # "for (name,value)" clause of the children dict comprehension, the
    # self.POST callable plus file= arguments of the final shouldFail2,
    # and a trailing "return d". Confirm against history before editing.
    # this creates a mutable file
    d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                         self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self._mutable_node = newnode
        self._mutable_uri = newnode.get_uri()
    # now upload it again and make sure that the URI doesn't change
    NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
    d.addCallback(lambda res:
                  self.POST(self.public_url + "/foo", t="upload",
                            file=("new.txt", NEWER_CONTENTS)))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
    d.addCallback(_got2)
    # upload a second time, using PUT instead of POST
    NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
    # finally list the directory, since mutable files are displayed
    # slightly differently
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/",
                           followRedirect=True))
    def _check_page(res):
        # TODO: assert more about the contents
        self.failUnless("SSK" in res)
    d.addCallback(_check_page)
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
    d.addCallback(_got3)
    # look at the JSON form of the enclosing directory
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/?t=json",
                           followRedirect=True))
    def _check_page_json(res):
        parsed = simplejson.loads(res)
        self.failUnlessEqual(parsed[0], "dirnode")
        children = dict( [(unicode(name),value)
                          in parsed[1]["children"].iteritems()] )
        self.failUnless("new.txt" in children)
        new_json = children["new.txt"]
        self.failUnlessEqual(new_json[0], "filenode")
        self.failUnless(new_json[1]["mutable"])
        self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
    d.addCallback(_check_page_json)
    # and the JSON form of the file
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=json"))
    def _check_file_json(res):
        parsed = simplejson.loads(res)
        self.failUnlessEqual(parsed[0], "filenode")
        self.failUnless(parsed[1]["mutable"])
        self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
    d.addCallback(_check_file_json)
    # and look at t=uri and t=readonly-uri
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=uri"))
    d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
    def _check_ro_uri(res):
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(res, ro_uri)
    d.addCallback(_check_ro_uri)
    # make sure we can get to it from /uri/URI
    d.addCallback(lambda res:
                  self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, NEW2_CONTENTS))
    # and that HEAD computes the size correctly
    d.addCallback(lambda res:
                  self.HEAD(self.public_url + "/foo/new.txt",
                            return_response=True))
    def _got_headers((res, status, headers)):
        self.failUnlessEqual(res, "")
        self.failUnlessEqual(headers["content-length"][0],
                             str(len(NEW2_CONTENTS)))
        self.failUnlessEqual(headers["content-type"], ["text/plain"])
    d.addCallback(_got_headers)
    # make sure that size errors are displayed correctly for overwrite
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error,
                                   "test_POST_upload_mutable-toobig",
                                   "413 Request Entity Too Large",
                                   "SDMF is limited to one segment, and 10001 > 10000",
                                   self.public_url + "/foo", t="upload",
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)),
    d.addErrback(self.dump_error)
def test_POST_upload_mutable_toobig(self):
    # Linked mutable upload larger than MUTABLE_SIZELIMIT must be
    # rejected with 413.
    # NOTE(review): the failure name string says "no_link" -- looks like
    # a copy-paste from the sibling test; also the self.POST callable
    # and file=( opening appear to be missing from the call.
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_no_link_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.public_url + "/foo",
                         t="upload", mutable="true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
def dump_error(self, f):
    # if the web server returns an error code (like 400 Bad Request),
    # web.client.getPage puts the HTTP response body into the .response
    # attribute of the exception object that it gives back. It does not
    # appear in the Failure's repr(), so the ERROR that trial displays
    # will be rather terse and unhelpful. addErrback this method to the
    # end of your chain to get more information out of these errors.
    # NOTE(review): as an errback this should re-return 'f' so the
    # failure keeps propagating; a "return f" appears to be missing.
    if f.check(error.Error):
        print "web.error.Error:"
        print f.value.response
def test_POST_upload_replace(self):
    # Uploading over an existing child name should replace its contents.
    # NOTE(review): "fn = self._foo_node" and a trailing "return d"
    # appear to be missing -- 'fn' is otherwise unbound.
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_no_replace_ok(self):
    """replace=false is fine when the child name is new."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessEqual(res,
                                                   self.NEWFILE_CONTENTS))
    # Return the Deferred so trial waits for the GET and assertion.
    return d
def test_POST_upload_no_replace_queryarg(self):
    # replace=false in the query string must refuse to overwrite an
    # existing child, leaving the original bar.txt intact.
    # NOTE(review): a "409 Conflict" argument to shouldFail and a
    # trailing "return d" appear to be missing.
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error,
              "POST_upload_no_replace_queryarg",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_upload_no_replace_field(self):
    # replace=false as a form field must refuse to overwrite an existing
    # child, leaving the original bar.txt intact.
    # NOTE(review): a "409 Conflict" argument to shouldFail and a
    # trailing "return d" appear to be missing.
    d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_upload_whendone(self):
    # when_done= on a linked upload should redirect there, and the
    # upload should still land in the directory.
    # NOTE(review): "fn = self._foo_node" and a trailing "return d"
    # appear to be missing -- 'fn' is otherwise unbound.
    d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_named(self):
    # The name= field supplies the child name when the file part has no
    # filename.
    # NOTE(review): "fn = self._foo_node" and a trailing "return d"
    # appear to be missing -- 'fn' is otherwise unbound.
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="new.txt", file=self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_named_badfilename(self):
    # A name= containing a slash must be rejected, and nothing added.
    # NOTE(review): a "400 Bad Request" argument to shouldFail, the tail
    # of the expected-children list (e.g. u"sub"), and a trailing
    # "return d" appear to be missing.
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_upload_named_badfilename",
              "name= may not contain a slash",
    # make sure that nothing was added
    d.addCallback(lambda res:
                  self.failUnlessNodeKeysAre(self._foo_node,
                                             [u"bar.txt", u"blockingfile",
                                              u"empty", u"n\u00fc.txt",
def test_POST_FILEURL_check(self):
    # t=check on a file: plain POST, when_done= redirect, return_to=
    # link, and output=JSON all get exercised.
    # NOTE(review): the "def _check(res):" / "def _check3(res):" headers
    # and the middle arguments of the shouldRedirect2 call (_check2, the
    # self.POST callable, bar_url, t="check") appear to be missing, as
    # does a trailing "return d".
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check")
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
def test_POST_FILEURL_check_and_repair(self):
    # t=check&repair=true on a healthy file: plain POST, when_done=
    # redirect, and return_to= link.
    # NOTE(review): the "def _check(res):" / "def _check3(res):" headers
    # and the middle arguments of the shouldRedirect2 call appear to be
    # missing, as does a trailing "return d".
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check", repair="true")
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
def test_POST_DIRURL_check(self):
    # t=check on a directory: plain POST, when_done= redirect,
    # return_to= link, and output=JSON.
    # NOTE(review): the "def _check(res):" / "def _check3(res):" headers
    # and the middle arguments of the shouldRedirect2 call appear to be
    # missing, as does a trailing "return d".
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check")
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
        self.failUnless("Healthy :" in res, res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
def test_POST_DIRURL_check_and_repair(self):
    # t=check&repair=true on a directory: plain POST, when_done=
    # redirect, and return_to= link.
    # NOTE(review): the "def _check(res):" / "def _check3(res):" headers
    # and the middle arguments of the shouldRedirect2 call appear to be
    # missing, as does a trailing "return d".
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check", repair="true")
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
def wait_for_operation(self, ignored, ophandle):
    # Poll /operations/<handle>?t=status&output=JSON once per second
    # until the operation reports finished, then deliver the parsed
    # status.
    # NOTE(review): the "d = self.GET(url)" line, the "def _got(res):"
    # header, the finished-case "return data", and the trailing
    # addCallback/return appear to be missing.
    url = "/operations/" + ophandle
    url += "?t=status&output=JSON"
        data = simplejson.loads(res)
        if not data["finished"]:
            d = self.stall(delay=1.0)
            d.addCallback(self.wait_for_operation, ophandle)
def get_operation_results(self, ignored, ophandle, output=None):
    # Fetch /operations/<handle> results, optionally as a specific
    # output format; JSON output is parsed before being returned.
    # NOTE(review): the "url += '?t=status'" line, the "d = self.GET(url)"
    # call, the "def _got(res):" header, and the non-JSON "return res"
    # path appear to be missing.
    url = "/operations/" + ophandle
        url += "&output=" + output
        if output and output.lower() == "json":
            return simplejson.loads(res)
def test_POST_DIRURL_deepcheck_no_ophandle(self):
    # t=start-deep-check is a slow operation and must be rejected when
    # no ophandle= is supplied.
    # NOTE(review): a "400 Bad Request" argument may be missing from the
    # shouldFail2 call, as in sibling tests.
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepcheck_no_ophandle",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-check")
def test_POST_DIRURL_deepcheck(self):
    # Start a deep-check under ophandle 123; verify the redirect, the
    # JSON and HTML results, the slashed URL variant, the 404 for a
    # bogus SI, and the per-object detail page for foo's SI.
    # NOTE(review): a trailing "return d" appears to be missing.
    def _check_redirect(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.endswith("/operations/123"))
    d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                             self.POST, self.public_url,
                             t="start-deep-check", ophandle="123")
    d.addCallback(self.wait_for_operation, "123")
    def _check_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnlessEqual(data["count-objects-checked"], 8)
        self.failUnlessEqual(data["count-objects-healthy"], 8)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "123", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy: <span>8</span>" in res)
    d.addCallback(_check_html)
    d.addCallback(lambda res:
                  self.GET("/operations/123/"))
    d.addCallback(_check_html) # should be the same as without the slash
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "one", "404 Not Found",
                                   "No detailed results for SI bogus",
                                   self.GET, "/operations/123/bogus"))
    foo_si = self._foo_node.get_storage_index()
    foo_si_s = base32.b2a(foo_si)
    d.addCallback(lambda res:
                  self.GET("/operations/123/%s?output=JSON" % foo_si_s))
    def _check_foo_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data["storage-index"], foo_si_s)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_foo_json)
def test_POST_DIRURL_deepcheck_and_repair(self):
    # Deep-check with repair=true on an all-healthy tree: every repair
    # counter should be zero and both pre- and post-repair counts 8.
    # NOTE(review): a trailing "return d" appears to be missing.
    d = self.POST(self.public_url, t="start-deep-check", repair="true",
                  ophandle="124", output="json", followRedirect=True)
    d.addCallback(self.wait_for_operation, "124")
    def _check_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnlessEqual(data["count-objects-checked"], 8)
        self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
        self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
        self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
        self.failUnlessEqual(data["count-repairs-attempted"], 0)
        self.failUnlessEqual(data["count-repairs-successful"], 0)
        self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
        self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
        self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
        self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "124", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
        self.failUnless("Repairs Attempted: <span>0</span>" in res)
        self.failUnless("Repairs Successful: <span>0</span>" in res)
        self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
        self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
    d.addCallback(_check_html)
def test_POST_FILEURL_bad_t(self):
    # A bogus t= value on a file POST must be rejected with 400.
    # NOTE(review): the kwargs of the final self.POST call (t="bogus")
    # and a "return d" appear to be missing.
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to file: bad t=bogus",
                         self.POST, self.public_url + "/foo/bar.txt",
def test_POST_mkdir(self): # return value?
    """POST t=mkdir with name= should create an empty child directory."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # Return the Deferred so trial waits for the whole callback chain.
    return d
def test_POST_mkdir_2(self):
    """POST to child-URL?t=mkdir should also create an empty directory."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # Return the Deferred so trial waits for the whole callback chain.
    return d
def test_POST_mkdirs_2(self):
    """POST mkdir with a missing intermediate dir creates both levels."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # Return the Deferred so trial waits for the whole callback chain.
    return d
def test_POST_mkdir_no_parentdir_noredirect(self):
    """POST /uri?t=mkdir creates an unlinked directory; the response body
    must be a parseable directory write-cap."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # Parsing the response validates that it is a DIR2 cap; this
        # raises if the body is not a well-formed URI.
        uri.NewDirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    # Return the Deferred so trial waits for the validation callback.
    return d
def test_POST_mkdir_no_parentdir_redirect(self):
    """With redirect_to_result=true, mkdir should 303-redirect to the
    new directory's /uri/URI:DIR2:... page."""
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    # Return the Deferred so trial waits for the redirect-target check.
    return d
def test_POST_noparent_bad(self):
    """POST /uri with an unknown t= must be rejected with 400 and an
    explanatory message listing the accepted operations."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
    # Return the Deferred so trial waits for the error check.
    return d
# Scrape the welcome page's "Create a directory" form with a regex, then
# re-submit the extracted action/fields and expect a 303 redirect.
# NOTE(review): listing lines 1897 (presumably the initial GET that creates
# d) and 1902 (presumably formt = mo.group(2)) are elided from this view.
1895 def test_welcome_page_mkdir_button(self):
1896 # Fetch the welcome page.
1898 def _after_get_welcome_page(res):
1899 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
1900 mo = MKDIR_BUTTON_RE.search(res)
1901 formaction = mo.group(1)
1903 formaname = mo.group(3)
1904 formavalue = mo.group(4)
1905 return (formaction, formt, formaname, formavalue)
1906 d.addCallback(_after_get_welcome_page)
1907 def _after_parse_form(res):
1908 (formaction, formt, formaname, formavalue) = res
1909 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
1910 d.addCallback(_after_parse_form)
1911 d.addBoth(self.shouldRedirect, None, statuscode='303')
# mkdir over an existing child ("sub") without replace=false must replace
# it with a fresh empty directory.
1914 def test_POST_mkdir_replace(self): # return value?
1915 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1916 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1917 d.addCallback(self.failUnlessNodeKeysAre, [])
# mkdir over an existing child with ?replace=false must fail and leave the
# original "sub" directory (still containing baz.txt) untouched.
# NOTE(review): listing line 1924 (presumably the "409 Conflict" status
# argument) is elided from this view.
1920 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1921 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1922 d.addBoth(self.shouldFail, error.Error,
1923 "POST_mkdir_no_replace_queryarg",
1925 "There was already a child by that name, and you asked me "
1926 "to not replace it")
1927 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1928 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Same as the queryarg variant, but replace=false passed as a form field.
# NOTE(review): listing lines 1933 (presumably replace="false") and 1935
# (presumably the "409 Conflict" status) are elided from this view.
1931 def test_POST_mkdir_no_replace_field(self): # return value?
1932 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1934 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1936 "There was already a child by that name, and you asked me "
1937 "to not replace it")
1938 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1939 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# when_done= as a form field must make a successful mkdir redirect to the
# given URL ("/THERE") while still creating the directory.
1942 def test_POST_mkdir_whendone_field(self):
1943 d = self.POST(self.public_url + "/foo",
1944 t="mkdir", name="newdir", when_done="/THERE")
1945 d.addBoth(self.shouldRedirect, "/THERE")
1946 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1947 d.addCallback(self.failUnlessNodeKeysAre, [])
# Same as test_POST_mkdir_whendone_field but when_done= as a query arg.
1950 def test_POST_mkdir_whendone_queryarg(self):
1951 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1952 t="mkdir", name="newdir")
1953 d.addBoth(self.shouldRedirect, "/THERE")
1954 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1955 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST to a directory with an unknown t= must yield "400 Bad Request".
1958 def test_POST_bad_t(self):
1959 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1960 "POST to a directory with bad t=BOGUS",
1961 self.POST, self.public_url + "/foo", t="BOGUS")
# t=set_children atomically adds three filenode children from a JSON
# request body. NOTE(review): several listing lines of the JSON literal
# (1968-1969, 1971-1972, 1975-1976, etc.) are elided from this view, as is
# the definition of _then (line 1996).
1964 def test_POST_set_children(self):
1965 contents9, n9, newuri9 = self.makefile(9)
1966 contents10, n10, newuri10 = self.makefile(10)
1967 contents11, n11, newuri11 = self.makefile(11)
1970 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1973 "ctime": 1002777696.7564139,
1974 "mtime": 1002777696.7564139
1977 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1980 "ctime": 1002777696.7564139,
1981 "mtime": 1002777696.7564139
1984 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1987 "ctime": 1002777696.7564139,
1988 "mtime": 1002777696.7564139
1991 }""" % (newuri9, newuri10, newuri11)
1993 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
1995 d = client.getPage(url, method="POST", postdata=reqbody)
1997 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
1998 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
1999 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
2001 d.addCallback(_then)
2002 d.addErrback(self.dump_error)
# POST t=uri attaches an existing file cap as a new child "new.txt".
2005 def test_POST_put_uri(self):
2006 contents, n, newuri = self.makefile(8)
2007 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2008 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2009 d.addCallback(lambda res:
2010 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# POST t=uri over an existing child (bar.txt) must replace it by default.
2014 def test_POST_put_uri_replace(self):
2015 contents, n, newuri = self.makefile(8)
2016 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2017 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2018 d.addCallback(lambda res:
2019 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# t=uri with ?replace=false over an existing child must fail and leave the
# original bar.txt contents intact.
2023 def test_POST_put_uri_no_replace_queryarg(self):
2024 contents, n, newuri = self.makefile(8)
2025 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2026 name="bar.txt", uri=newuri)
2027 d.addBoth(self.shouldFail, error.Error,
2028 "POST_put_uri_no_replace_queryarg",
2030 "There was already a child by that name, and you asked me "
2031 "to not replace it")
2032 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2033 d.addCallback(self.failUnlessIsBarDotTxt)
# Same as the queryarg variant, but replace="false" passed as a form field.
2036 def test_POST_put_uri_no_replace_field(self):
2037 contents, n, newuri = self.makefile(8)
2038 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2039 name="bar.txt", uri=newuri)
2040 d.addBoth(self.shouldFail, error.Error,
2041 "POST_put_uri_no_replace_field",
2043 "There was already a child by that name, and you asked me "
2044 "to not replace it")
2045 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2046 d.addCallback(self.failUnlessIsBarDotTxt)
# POST t=delete must unlink the named child from the directory listing.
2049 def test_POST_delete(self):
2050 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2051 d.addCallback(lambda res: self._foo_node.list())
2052 def _check(children):
2053 self.failIf(u"bar.txt" in children)
2054 d.addCallback(_check)
# t=rename moves bar.txt to wibble.txt; contents and JSON metadata must
# survive the rename.
2057 def test_POST_rename_file(self):
2058 d = self.POST(self.public_url + "/foo", t="rename",
2059 from_name="bar.txt", to_name='wibble.txt')
2060 d.addCallback(lambda res:
2061 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2062 d.addCallback(lambda res:
2063 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2064 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2065 d.addCallback(self.failUnlessIsBarDotTxt)
2066 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2067 d.addCallback(self.failUnlessIsBarJSON)
# Renaming a file to its own name must be a harmless no-op.
2070 def test_POST_rename_file_redundant(self):
2071 d = self.POST(self.public_url + "/foo", t="rename",
2072 from_name="bar.txt", to_name='bar.txt')
2073 d.addCallback(lambda res:
2074 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2075 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2076 d.addCallback(self.failUnlessIsBarDotTxt)
2077 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2078 d.addCallback(self.failUnlessIsBarJSON)
# Renaming onto an existing child ("empty", a directory) replaces it by
# default; afterwards /foo/empty serves bar.txt's contents.
2081 def test_POST_rename_file_replace(self):
2082 # rename a file and replace a directory with it
2083 d = self.POST(self.public_url + "/foo", t="rename",
2084 from_name="bar.txt", to_name='empty')
2085 d.addCallback(lambda res:
2086 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2087 d.addCallback(lambda res:
2088 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2089 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2090 d.addCallback(self.failUnlessIsBarDotTxt)
2091 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2092 d.addCallback(self.failUnlessIsBarJSON)
# Rename onto an existing child with ?replace=false must fail; "empty"
# must still be the original (empty) directory afterwards.
2095 def test_POST_rename_file_no_replace_queryarg(self):
2096 # rename a file and replace a directory with it
2097 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2098 from_name="bar.txt", to_name='empty')
2099 d.addBoth(self.shouldFail, error.Error,
2100 "POST_rename_file_no_replace_queryarg",
2102 "There was already a child by that name, and you asked me "
2103 "to not replace it")
2104 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2105 d.addCallback(self.failUnlessIsEmptyJSON)
# Same as the queryarg variant, but replace="false" passed as a form field.
2108 def test_POST_rename_file_no_replace_field(self):
2109 # rename a file and replace a directory with it
2110 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2111 from_name="bar.txt", to_name='empty')
2112 d.addBoth(self.shouldFail, error.Error,
2113 "POST_rename_file_no_replace_field",
2115 "There was already a child by that name, and you asked me "
2116 "to not replace it")
2117 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2118 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the JSON encoding of an empty directory node:
    a ["dirnode", {...}] pair whose "children" map has no entries."""
    data = simplejson.loads(res)
    kind = data[0]
    body = data[1]
    self.failUnlessEqual(kind, "dirnode", data)
    self.failUnlessEqual(len(body["children"]), 0)
# to_name= containing a slash must be rejected, and bar.txt must remain.
# NOTE(review): listing lines 2131 and 2133 (presumably the expected HTTP
# status argument and the closing of the shouldFail call) are elided.
2126 def test_POST_rename_file_slash_fail(self):
2127 d = self.POST(self.public_url + "/foo", t="rename",
2128 from_name="bar.txt", to_name='kirk/spock.txt')
2129 d.addBoth(self.shouldFail, error.Error,
2130 "test_POST_rename_file_slash_fail",
2132 "to_name= may not contain a slash",
2134 d.addCallback(lambda res:
2135 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# t=rename also works on directories: "foo" becomes "plunk" in the public
# root, and /plunk?t=json still matches the original foo contents.
2138 def test_POST_rename_dir(self):
2139 d = self.POST(self.public_url, t="rename",
2140 from_name="foo", to_name='plunk')
2141 d.addCallback(lambda res:
2142 self.failIfNodeHasChild(self.public_root, u"foo"))
2143 d.addCallback(lambda res:
2144 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2145 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2146 d.addCallback(self.failUnlessIsFooJSON)
2149 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2150 """ If target is not None then the redirection has to go to target. If
2151 statuscode is not None then the redirection has to be accomplished with
2152 that HTTP status code."""
2153 if not isinstance(res, failure.Failure):
2154 to_where = (target is None) and "somewhere" or ("to " + target)
2155 self.fail("%s: we were expecting to get redirected %s, not get an"
2156 " actual page: %s" % (which, to_where, res))
2157 res.trap(error.PageRedirect)
2158 if statuscode is not None:
2159 self.failUnlessEqual(res.value.status, statuscode,
2160 "%s: not a redirect" % which)
2161 if target is not None:
2162 # the PageRedirect does not seem to capture the uri= query arg
2163 # properly, so we can't check for it.
2164 realtarget = self.webish_url + target
2165 self.failUnlessEqual(res.value.location, realtarget,
2166 "%s: wrong target" % which)
2167 return res.value.location
# GET /uri?uri=CAP must redirect to /uri/$CAP, carrying extra query args
# (filename=, t=json) through to the redirect target.
# NOTE(review): listing line 2173 (presumably the initial d = self.GET(base)
# call) is elided from this view.
2169 def test_GET_URI_form(self):
2170 base = "/uri?uri=%s" % self._bar_txt_uri
2171 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2172 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2174 d.addBoth(self.shouldRedirect, targetbase)
2175 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2176 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2177 d.addCallback(lambda res: self.GET(base+"&t=json"))
2178 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2179 d.addCallback(self.log, "about to get file by uri")
2180 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2181 d.addCallback(self.failUnlessIsBarDotTxt)
2182 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2183 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2184 followRedirect=True))
2185 d.addCallback(self.failUnlessIsFooJSON)
2186 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= argument must fail with "400 Bad Request".
# (The tail of the shouldFail2 call is elided from this view.)
2190 def test_GET_URI_form_bad(self):
2191 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2192 "400 Bad Request", "GET /uri requires uri=",
# t=rename-form must serve an HTML form pre-filled with the from_name and
# a when_done field. (The def _check(res): line is elided from this view.)
2196 def test_GET_rename_form(self):
2197 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2198 followRedirect=True)
2200 self.failUnless('name="when_done" value="."' in res, res)
2201 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2202 d.addCallback(_check)
# Deferred-chain logging hook; the print is commented out to keep test
# output quiet. (Listing line 2207, presumably "return res", is elided.)
2205 def log(self, res, msg):
2206 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/$FILECAP serves the file contents, with or without filename=
# and save=true decorations. (The initial d = self.GET(base) is elided.)
2210 def test_GET_URI_URL(self):
2211 base = "/uri/%s" % self._bar_txt_uri
2213 d.addCallback(self.failUnlessIsBarDotTxt)
2214 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2215 d.addCallback(self.failUnlessIsBarDotTxt)
2216 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2217 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/$DIRCAP?t=json serves the directory's JSON representation.
# (The d = self.GET(base) line is elided from this view.)
2220 def test_GET_URI_URL_dir(self):
2221 base = "/uri/%s?t=json" % self._foo_uri
2223 d.addCallback(self.failUnlessIsFooJSON)
# Fetching an unrecoverable file cap must produce HTTP 410 GONE with a
# NotEnoughSharesError in the body.
2226 def test_GET_URI_URL_missing(self):
2227 base = "/uri/%s" % self._bad_file_uri
2228 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2229 http.GONE, None, "NotEnoughSharesError",
2231 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2232 # here? we must arrange for a download to fail after target.open()
2233 # has been called, and then inspect the response to see that it is
2234 # shorter than we expected.
# PUT ?t=uri on an existing directory URL replaces that child with the
# directory named by the request body. (The def _made_dir(dn): wrapper
# line and the tail of failUnlessChildURIIs are elided from this view.)
2237 def test_PUT_DIRURL_uri(self):
2238 d = self.s.create_empty_dirnode()
2240 new_uri = dn.get_uri()
2241 # replace /foo with a new (empty) directory
2242 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2243 d.addCallback(lambda res:
2244 self.failUnlessEqual(res.strip(), new_uri))
2245 d.addCallback(lambda res:
2246 self.failUnlessChildURIIs(self.public_root,
2250 d.addCallback(_made_dir)
# PUT ?t=uri&replace=false over an existing child must fail with 409
# Conflict and leave /foo pointing at its original URI.
2253 def test_PUT_DIRURL_uri_noreplace(self):
2254 d = self.s.create_empty_dirnode()
2256 new_uri = dn.get_uri()
2257 # replace /foo with a new (empty) directory, but ask that
2258 # replace=false, so it should fail
2259 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2260 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2262 self.public_url + "/foo?t=uri&replace=false",
2264 d.addCallback(lambda res:
2265 self.failUnlessChildURIIs(self.public_root,
2269 d.addCallback(_made_dir)
# PUT to a directory with an unknown t= must yield 400 Bad Request and
# leave the child URI unchanged.
2272 def test_PUT_DIRURL_bad_t(self):
2273 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2274 "400 Bad Request", "PUT to a directory",
2275 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2276 d.addCallback(lambda res:
2277 self.failUnlessChildURIIs(self.public_root,
# PUT ?t=uri at a new child name attaches an existing file cap; the
# response body echoes the cap.
2282 def test_PUT_NEWFILEURL_uri(self):
2283 contents, n, new_uri = self.makefile(8)
2284 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2285 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2286 d.addCallback(lambda res:
2287 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# PUT ?t=uri over an existing child (bar.txt) replaces it by default.
2291 def test_PUT_NEWFILEURL_uri_replace(self):
2292 contents, n, new_uri = self.makefile(8)
2293 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2294 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2295 d.addCallback(lambda res:
2296 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# PUT ?t=uri&replace=false over an existing child must fail.
# NOTE(review): listing line 2304 (presumably the expected HTTP status
# argument) is elided from this view.
2300 def test_PUT_NEWFILEURL_uri_no_replace(self):
2301 contents, n, new_uri = self.makefile(8)
2302 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2303 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2305 "There was already a child by that name, and you asked me "
2306 "to not replace it")
# PUT /uri uploads the body as a new immutable file; the returned cap must
# index the same contents in FakeCHKFileNode.all_contents and be fetchable.
# (The def _check(uri): and def _check2(res): wrapper lines are elided.)
2309 def test_PUT_NEWFILE_URI(self):
2310 file_contents = "New file contents here\n"
2311 d = self.PUT("/uri", file_contents)
2313 assert isinstance(uri, str), uri
2314 self.failUnless(uri in FakeCHKFileNode.all_contents)
2315 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2317 return self.GET("/uri/%s" % uri)
2318 d.addCallback(_check)
2320 self.failUnlessEqual(res, file_contents)
2321 d.addCallback(_check2)
# Same as test_PUT_NEWFILE_URI but with an explicit ?mutable=false, which
# must still produce an immutable CHK file.
2324 def test_PUT_NEWFILE_URI_not_mutable(self):
2325 file_contents = "New file contents here\n"
2326 d = self.PUT("/uri?mutable=false", file_contents)
2328 assert isinstance(uri, str), uri
2329 self.failUnless(uri in FakeCHKFileNode.all_contents)
2330 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2332 return self.GET("/uri/%s" % uri)
2333 d.addCallback(_check)
2335 self.failUnlessEqual(res, file_contents)
2336 d.addCallback(_check2)
# PUT /uri with an unknown t= must be rejected with the canonical
# "accepts only ..." error. (Listing line 2343, presumably the expected
# HTTP status argument, is elided.)
2339 def test_PUT_NEWFILE_URI_only_PUT(self):
2340 d = self.PUT("/uri?t=bogus", "")
2341 d.addBoth(self.shouldFail, error.Error,
2342 "PUT_NEWFILE_URI_only_PUT",
2344 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true creates a mutable file; the returned cap must be an
# IMutableFileURI whose storage index appears in FakeMutableFileNode, and
# download_best_version() must return the uploaded contents.
# NOTE(review): lines 2351-2352 (parsing uri into u) and 2361-2363 (the
# start of the trailing _check helper, which appears to belong to a CHK
# cap check) are elided from this view.
2347 def test_PUT_NEWFILE_URI_mutable(self):
2348 file_contents = "New file contents here\n"
2349 d = self.PUT("/uri?mutable=true", file_contents)
2350 def _check_mutable(uri):
2353 self.failUnless(IMutableFileURI.providedBy(u))
2354 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2355 n = self.s.create_node_from_uri(uri)
2356 return n.download_best_version()
2357 d.addCallback(_check_mutable)
2358 def _check2_mutable(data):
2359 self.failUnlessEqual(data, file_contents)
2360 d.addCallback(_check2_mutable)
2364 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2365 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2367 return self.GET("/uri/%s" % uri)
2368 d.addCallback(_check)
2370 self.failUnlessEqual(res, file_contents)
2371 d.addCallback(_check2)
# PUT /uri?t=mkdir creates an empty directory and returns its cap; the cap
# must resolve to a node with no children and valid empty-dir JSON.
# (The def _check(uri): wrapper line is elided from this view.)
2374 def test_PUT_mkdir(self):
2375 d = self.PUT("/uri?t=mkdir", "")
2377 n = self.s.create_node_from_uri(uri.strip())
2378 d2 = self.failUnlessNodeKeysAre(n, [])
2379 d2.addCallback(lambda res:
2380 self.GET("/uri/%s?t=json" % uri))
2382 d.addCallback(_check)
2383 d.addCallback(self.failUnlessIsEmptyJSON)
# POST t=check on a child just has to complete without error here; the
# fake filenodes give no meaningful check results (see inline TODO).
2386 def test_POST_check(self):
2387 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2389 # this returns a string form of the results, which are probably
2390 # None since we're using fake filenodes.
2391 # TODO: verify that the check actually happened, by changing
2392 # FakeCHKFileNode to count how many times .check() has been
2395 d.addCallback(_done)
# An unsupported HTTP method (BOGUS) must return 501 Not Implemented.
2398 def test_bad_method(self):
2399 url = self.webish_url + self.public_url + "/foo/bar.txt"
2400 d = self.shouldHTTPError("test_bad_method",
2401 501, "Not Implemented",
2402 "I don't know how to treat a BOGUS request.",
2403 client.getPage, url, method="BOGUS")
# DELETE against the bare /uri root must return 501 Not Implemented.
2406 def test_short_url(self):
2407 url = self.webish_url + "/uri"
2408 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2409 "I don't know how to treat a DELETE request.",
2410 client.getPage, url, method="DELETE")
# Querying an operation handle that was never created must yield 404.
2413 def test_ophandle_bad(self):
2414 url = self.webish_url + "/operations/bogus?t=status"
2415 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2416 "unknown/expired handle 'bogus'",
2417 client.getPage, url)
# Start a manifest operation under ophandle=128, then t=cancel it: the
# monitor must report cancelled and the handle must be forgotten (404 on
# a later status query). NOTE(review): the def _check1(res)/_check2(res)
# wrapper lines and line 2442 (presumably client.getPage + the webish_url
# prefix of the final URL) are elided from this view.
2420 def test_ophandle_cancel(self):
2421 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2422 followRedirect=True)
2423 d.addCallback(lambda ignored:
2424 self.GET("/operations/128?t=status&output=JSON"))
2426 data = simplejson.loads(res)
2427 self.failUnless("finished" in data, res)
2428 monitor = self.ws.root.child_operations.handles["128"][0]
2429 d = self.POST("/operations/128?t=cancel&output=JSON")
2431 data = simplejson.loads(res)
2432 self.failUnless("finished" in data, res)
2433 # t=cancel causes the handle to be forgotten
2434 self.failUnless(monitor.is_cancelled())
2435 d.addCallback(_check2)
2437 d.addCallback(_check1)
2438 d.addCallback(lambda ignored:
2439 self.shouldHTTPError("test_ophandle_cancel",
2440 404, "404 Not Found",
2441 "unknown/expired handle '128'",
2443 "/operations/128?t=status&output=JSON"))
# retain-for=0 on a status query shortens the handle lifetime: after a
# 2-second stall the handle must have expired (404).
2446 def test_ophandle_retainfor(self):
2447 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2448 followRedirect=True)
2449 d.addCallback(lambda ignored:
2450 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2452 data = simplejson.loads(res)
2453 self.failUnless("finished" in data, res)
2454 d.addCallback(_check1)
2455 # the retain-for=0 will cause the handle to be expired very soon
2456 d.addCallback(self.stall, 2.0)
2457 d.addCallback(lambda ignored:
2458 self.shouldHTTPError("test_ophandle_retainfor",
2459 404, "404 Not Found",
2460 "unknown/expired handle '129'",
2462 "/operations/129?t=status&output=JSON"))
# release-after-complete=true on a finished operation's status query must
# expire the handle, so a follow-up query returns 404.
2465 def test_ophandle_release_after_complete(self):
2466 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2467 followRedirect=True)
2468 d.addCallback(self.wait_for_operation, "130")
2469 d.addCallback(lambda ignored:
2470 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2471 # the release-after-complete=true will cause the handle to be expired
2472 d.addCallback(lambda ignored:
2473 self.shouldHTTPError("test_ophandle_release_after_complete",
2474 404, "404 Not Found",
2475 "unknown/expired handle '130'",
2477 "/operations/130?t=status&output=JSON"))
# POST /report_incident must acknowledge with the thank-you message.
# (The def _done(res): wrapper line is elided from this view.)
2480 def test_incident(self):
2481 d = self.POST("/report_incident", details="eek")
2483 self.failUnless("Thank you for your report!" in res, res)
2484 d.addCallback(_done)
# Files dropped under the node's static directory must be served at
# /static/... (The f.write("hello")/f.close() lines and the _check
# wrapper line are elided from this view.)
2487 def test_static(self):
2488 webdir = os.path.join(self.staticdir, "subdir")
2489 fileutil.make_dirs(webdir)
2490 f = open(os.path.join(webdir, "hello.txt"), "wb")
2494 d = self.GET("/static/subdir/hello.txt")
2496 self.failUnlessEqual(res, "hello")
2497 d.addCallback(_check)
# Unit tests for the human-readable formatting helpers exposed via
# allmydata.web.common (abbreviate_*) and allmydata.web.status (plural).
2501 class Util(unittest.TestCase):
2502     def test_abbreviate_time(self):
2503         self.failUnlessEqual(common.abbreviate_time(None), "")
2504         self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
2505         self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
2506         self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
2507         self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
2509     def test_abbreviate_rate(self):
2510         self.failUnlessEqual(common.abbreviate_rate(None), "")
2511         self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
2512         self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
2513         self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
2515     def test_abbreviate_size(self):
2516         self.failUnlessEqual(common.abbreviate_size(None), "")
2517         self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
2518         self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
2519         self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
2520         self.failUnlessEqual(common.abbreviate_size(123), "123B")
2522     def test_plural(self):
# NOTE(review): listing lines 2523 and 2528 (presumably the inner
# "def convert(s):" and "def convert2(s):" helper definitions) are
# elided from this view.
2524             return "%d second%s" % (s, status.plural(s))
2525         self.failUnlessEqual(convert(0), "0 seconds")
2526         self.failUnlessEqual(convert(1), "1 second")
2527         self.failUnlessEqual(convert(2), "2 seconds")
2529             return "has share%s: %s" % (status.plural(s), ",".join(s))
2530         self.failUnlessEqual(convert2([]), "has shares: ")
2531         self.failUnlessEqual(convert2(["1"]), "has share: 1")
2532         self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2535 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """Hit the stashed URL for *which* with *args* appended as the query
    string, using the POST method. *ign* is an ignored Deferred-chain
    value so this can be used directly as a callback."""
    base = self.fileurls[which]
    return self.GET(base + "?" + args, method="POST", clientnum=clientnum)
# End-to-end t=check coverage on a real grid: a fully-healthy file, a
# literal file, a file missing one share ("sick"), a file with only one
# share left ("dead"), and a mutable file with one deliberately corrupted
# share — each checked in both HTML and JSON output.
# NOTE(review): several listing lines are elided from this view (e.g.
# 2544, 2546-2547 setup, 2566/2568 fileurls init, 2585 corrupt_share call,
# 2598/2619 the tail of the Return-to-file assertions).
2542 def test_filecheck(self):
2543 self.basedir = "web/Grid/filecheck"
2545 c0 = self.g.clients[0]
2548 d = c0.upload(upload.Data(DATA, convergence=""))
2549 def _stash_uri(ur, which):
2550 self.uris[which] = ur.uri
2551 d.addCallback(_stash_uri, "good")
2552 d.addCallback(lambda ign:
2553 c0.upload(upload.Data(DATA+"1", convergence="")))
2554 d.addCallback(_stash_uri, "sick")
2555 d.addCallback(lambda ign:
2556 c0.upload(upload.Data(DATA+"2", convergence="")))
2557 d.addCallback(_stash_uri, "dead")
2558 def _stash_mutable_uri(n, which):
2559 self.uris[which] = n.get_uri()
2560 assert isinstance(self.uris[which], str)
2561 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2562 d.addCallback(_stash_mutable_uri, "corrupt")
2563 d.addCallback(lambda ign:
2564 c0.upload(upload.Data("literal", convergence="")))
2565 d.addCallback(_stash_uri, "small")
2567 def _compute_fileurls(ignored):
2569 for which in self.uris:
2570 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2571 d.addCallback(_compute_fileurls)
# Damage the test files: delete one "sick" share, all but one "dead"
# share, and corrupt one share of the mutable file via the debug CLI.
2573 def _clobber_shares(ignored):
2574 good_shares = self.find_shares(self.uris["good"])
2575 self.failUnlessEqual(len(good_shares), 10)
2576 sick_shares = self.find_shares(self.uris["sick"])
2577 os.unlink(sick_shares[0][2])
2578 dead_shares = self.find_shares(self.uris["dead"])
2579 for i in range(1, 10):
2580 os.unlink(dead_shares[i][2])
2581 c_shares = self.find_shares(self.uris["corrupt"])
2582 cso = CorruptShareOptions()
2583 cso.stdout = StringIO()
2584 cso.parseOptions([c_shares[0][2]])
2586 d.addCallback(_clobber_shares)
2588 d.addCallback(self.CHECK, "good", "t=check")
2589 def _got_html_good(res):
2590 self.failUnless("Healthy" in res, res)
2591 self.failIf("Not Healthy" in res, res)
2592 d.addCallback(_got_html_good)
2593 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2594 def _got_html_good_return_to(res):
2595 self.failUnless("Healthy" in res, res)
2596 self.failIf("Not Healthy" in res, res)
2597 self.failUnless('<a href="somewhere">Return to file'
2599 d.addCallback(_got_html_good_return_to)
2600 d.addCallback(self.CHECK, "good", "t=check&output=json")
2601 def _got_json_good(res):
2602 r = simplejson.loads(res)
2603 self.failUnlessEqual(r["summary"], "Healthy")
2604 self.failUnless(r["results"]["healthy"])
2605 self.failIf(r["results"]["needs-rebalancing"])
2606 self.failUnless(r["results"]["recoverable"])
2607 d.addCallback(_got_json_good)
2609 d.addCallback(self.CHECK, "small", "t=check")
2610 def _got_html_small(res):
2611 self.failUnless("Literal files are always healthy" in res, res)
2612 self.failIf("Not Healthy" in res, res)
2613 d.addCallback(_got_html_small)
2614 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2615 def _got_html_small_return_to(res):
2616 self.failUnless("Literal files are always healthy" in res, res)
2617 self.failIf("Not Healthy" in res, res)
2618 self.failUnless('<a href="somewhere">Return to file'
2620 d.addCallback(_got_html_small_return_to)
2621 d.addCallback(self.CHECK, "small", "t=check&output=json")
2622 def _got_json_small(res):
2623 r = simplejson.loads(res)
2624 self.failUnlessEqual(r["storage-index"], "")
2625 self.failUnless(r["results"]["healthy"])
2626 d.addCallback(_got_json_small)
2628 d.addCallback(self.CHECK, "sick", "t=check")
2629 def _got_html_sick(res):
2630 self.failUnless("Not Healthy" in res, res)
2631 d.addCallback(_got_html_sick)
2632 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2633 def _got_json_sick(res):
2634 r = simplejson.loads(res)
2635 self.failUnlessEqual(r["summary"],
2636 "Not Healthy: 9 shares (enc 3-of-10)")
2637 self.failIf(r["results"]["healthy"])
2638 self.failIf(r["results"]["needs-rebalancing"])
2639 self.failUnless(r["results"]["recoverable"])
2640 d.addCallback(_got_json_sick)
2642 d.addCallback(self.CHECK, "dead", "t=check")
2643 def _got_html_dead(res):
2644 self.failUnless("Not Healthy" in res, res)
2645 d.addCallback(_got_html_dead)
2646 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2647 def _got_json_dead(res):
2648 r = simplejson.loads(res)
2649 self.failUnlessEqual(r["summary"],
2650 "Not Healthy: 1 shares (enc 3-of-10)")
2651 self.failIf(r["results"]["healthy"])
2652 self.failIf(r["results"]["needs-rebalancing"])
2653 self.failIf(r["results"]["recoverable"])
2654 d.addCallback(_got_json_dead)
2656 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2657 def _got_html_corrupt(res):
2658 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2659 d.addCallback(_got_html_corrupt)
2660 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2661 def _got_json_corrupt(res):
2662 r = simplejson.loads(res)
2663 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2665 self.failIf(r["results"]["healthy"])
2666 self.failUnless(r["results"]["recoverable"])
2667 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2668 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2669 d.addCallback(_got_json_corrupt)
2671 d.addErrback(self.explain_web_error)
# t=check&repair=true HTML output: a healthy file needs no repair, a
# sick file and a corrupt mutable file must report "Repair successful".
# Repair of a fully-dead file is intentionally left commented out (see
# the inline note). NOTE(review): setup lines 2676/2678-2679, fileurls
# init 2695/2697-2698, the corrupt_share call at 2714, and comment lines
# 2733-2734/2737 are elided from this view.
2674 def test_repair_html(self):
2675 self.basedir = "web/Grid/repair_html"
2677 c0 = self.g.clients[0]
2680 d = c0.upload(upload.Data(DATA, convergence=""))
2681 def _stash_uri(ur, which):
2682 self.uris[which] = ur.uri
2683 d.addCallback(_stash_uri, "good")
2684 d.addCallback(lambda ign:
2685 c0.upload(upload.Data(DATA+"1", convergence="")))
2686 d.addCallback(_stash_uri, "sick")
2687 d.addCallback(lambda ign:
2688 c0.upload(upload.Data(DATA+"2", convergence="")))
2689 d.addCallback(_stash_uri, "dead")
2690 def _stash_mutable_uri(n, which):
2691 self.uris[which] = n.get_uri()
2692 assert isinstance(self.uris[which], str)
2693 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2694 d.addCallback(_stash_mutable_uri, "corrupt")
2696 def _compute_fileurls(ignored):
2698 for which in self.uris:
2699 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2700 d.addCallback(_compute_fileurls)
# Same damage pattern as test_filecheck: one sick share deleted, nine
# dead shares deleted, one mutable share corrupted.
2702 def _clobber_shares(ignored):
2703 good_shares = self.find_shares(self.uris["good"])
2704 self.failUnlessEqual(len(good_shares), 10)
2705 sick_shares = self.find_shares(self.uris["sick"])
2706 os.unlink(sick_shares[0][2])
2707 dead_shares = self.find_shares(self.uris["dead"])
2708 for i in range(1, 10):
2709 os.unlink(dead_shares[i][2])
2710 c_shares = self.find_shares(self.uris["corrupt"])
2711 cso = CorruptShareOptions()
2712 cso.stdout = StringIO()
2713 cso.parseOptions([c_shares[0][2]])
2715 d.addCallback(_clobber_shares)
2717 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2718 def _got_html_good(res):
2719 self.failUnless("Healthy" in res, res)
2720 self.failIf("Not Healthy" in res, res)
2721 self.failUnless("No repair necessary" in res, res)
2722 d.addCallback(_got_html_good)
2724 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2725 def _got_html_sick(res):
2726 self.failUnless("Healthy : healthy" in res, res)
2727 self.failIf("Not Healthy" in res, res)
2728 self.failUnless("Repair successful" in res, res)
2729 d.addCallback(_got_html_sick)
2731 # repair of a dead file will fail, of course, but it isn't yet
2732 # clear how this should be reported. Right now it shows up as
2735 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2736 #def _got_html_dead(res):
2738 # self.failUnless("Healthy : healthy" in res, res)
2739 # self.failIf("Not Healthy" in res, res)
2740 # self.failUnless("No repair necessary" in res, res)
2741 #d.addCallback(_got_html_dead)
2743 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2744 def _got_html_corrupt(res):
2745 self.failUnless("Healthy : Healthy" in res, res)
2746 self.failIf("Not Healthy" in res, res)
2747 self.failUnless("Repair successful" in res, res)
2748 d.addCallback(_got_html_corrupt)
2750 d.addErrback(self.explain_web_error)
# t=check&repair=true&output=json on a file missing one share: the JSON
# must report a successful repair, unhealthy pre-repair results, and
# healthy post-repair results. NOTE(review): setup lines 2755/2757-2758
# and the fileurls init at 2765 are elided from this view.
2753 def test_repair_json(self):
2754 self.basedir = "web/Grid/repair_json"
2756 c0 = self.g.clients[0]
2759 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2760 def _stash_uri(ur, which):
2761 self.uris[which] = ur.uri
2762 d.addCallback(_stash_uri, "sick")
2764 def _compute_fileurls(ignored):
2766 for which in self.uris:
2767 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2768 d.addCallback(_compute_fileurls)
2770 def _clobber_shares(ignored):
2771 sick_shares = self.find_shares(self.uris["sick"])
2772 os.unlink(sick_shares[0][2])
2773 d.addCallback(_clobber_shares)
2775 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2776 def _got_json_sick(res):
2777 r = simplejson.loads(res)
2778 self.failUnlessEqual(r["repair-attempted"], True)
2779 self.failUnlessEqual(r["repair-successful"], True)
2780 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2781 "Not Healthy: 9 shares (enc 3-of-10)")
2782 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2783 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2784 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2785 d.addCallback(_got_json_sick)
2787 d.addErrback(self.explain_web_error)
    def test_deep_check(self):
        """Drive t=stream-deep-check (and t=stream-manifest) over a small
        directory tree: a healthy CHK file, a literal file, a 'sick' file
        with two shares deleted, and later an unrecoverable subdirectory.
        Verifies the streamed JSON units and the ERROR:-line convention
        for reporting fatal errors mid-stream.
        NOTE(review): this view of the source is missing lines (several
        'def _done(res):'-style headers and call continuations), so some
        statements below appear orphaned or unclosed."""
        self.basedir = "web/Grid/deep_check"
        c0 = self.g.clients[0]
        d = c0.create_empty_dirnode()
        def _stash_root_and_create_file(n):
            # remember the root's webapi URL, then add a healthy CHK child
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            # record each child's cap under a symbolic name
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        def _clobber_shares(ignored):
            # delete two of the sick file's shares: still recoverable
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
            # each output line is one JSON "unit": root, good, small,
            # sick, plus a trailing stats unit
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            self.failUnlessEqual(len(units), 4+1)
            # should be parent-first
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(ugood["cap"], self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
            self.failUnlessEqual(stats["type"], "stats")
            self.failUnlessEqual(s["count-immutable-files"], 2)
            self.failUnlessEqual(s["count-literal-files"], 1)
            self.failUnlessEqual(s["count-directories"], 1)
        d.addCallback(_done)

        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens
        d.addCallback(lambda ign:
                      self.rootnode.create_empty_directory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+"2",
        d.addCallback(_stash_uri, "grandchild")
        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],
        # root/subdir [unrecoverable]
        # root/subdir/grandchild
        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            # the stream must contain an ERROR: line followed by traceback text
            lines = res.splitlines()
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            # everything before the ERROR: line must still parse as JSON units
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 5) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)

        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            # same ERROR: convention for deep-check; additionally the
            # subdir's own check-results must show it is unrecoverable
            lines = res.splitlines()
                           for (i,line) in enumerate(lines)
                           if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessEqual(len(units), 5) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            self.failUnlessEqual(r["count-recoverable-versions"], 0)
            self.failUnlessEqual(r["count-shares-good"], 1)
            self.failUnlessEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)

        d.addErrback(self.explain_web_error)
2930 def test_deep_check_and_repair(self):
2931 self.basedir = "web/Grid/deep_check_and_repair"
2933 c0 = self.g.clients[0]
2937 d = c0.create_empty_dirnode()
2938 def _stash_root_and_create_file(n):
2940 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2941 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2942 d.addCallback(_stash_root_and_create_file)
2943 def _stash_uri(fn, which):
2944 self.uris[which] = fn.get_uri()
2945 d.addCallback(_stash_uri, "good")
2946 d.addCallback(lambda ign:
2947 self.rootnode.add_file(u"small",
2948 upload.Data("literal",
2950 d.addCallback(_stash_uri, "small")
2951 d.addCallback(lambda ign:
2952 self.rootnode.add_file(u"sick",
2953 upload.Data(DATA+"1",
2955 d.addCallback(_stash_uri, "sick")
2956 #d.addCallback(lambda ign:
2957 # self.rootnode.add_file(u"dead",
2958 # upload.Data(DATA+"2",
2960 #d.addCallback(_stash_uri, "dead")
2962 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2963 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2964 #d.addCallback(_stash_uri, "corrupt")
2966 def _clobber_shares(ignored):
2967 good_shares = self.find_shares(self.uris["good"])
2968 self.failUnlessEqual(len(good_shares), 10)
2969 sick_shares = self.find_shares(self.uris["sick"])
2970 os.unlink(sick_shares[0][2])
2971 #dead_shares = self.find_shares(self.uris["dead"])
2972 #for i in range(1, 10):
2973 # os.unlink(dead_shares[i][2])
2975 #c_shares = self.find_shares(self.uris["corrupt"])
2976 #cso = CorruptShareOptions()
2977 #cso.stdout = StringIO()
2978 #cso.parseOptions([c_shares[0][2]])
2980 d.addCallback(_clobber_shares)
2983 # root/good CHK, 10 shares
2985 # root/sick CHK, 9 shares
2987 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
2989 units = [simplejson.loads(line)
2990 for line in res.splitlines()
2992 self.failUnlessEqual(len(units), 4+1)
2993 # should be parent-first
2995 self.failUnlessEqual(u0["path"], [])
2996 self.failUnlessEqual(u0["type"], "directory")
2997 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2998 u0crr = u0["check-and-repair-results"]
2999 self.failUnlessEqual(u0crr["repair-attempted"], False)
3000 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3002 ugood = [u for u in units
3003 if u["type"] == "file" and u["path"] == [u"good"]][0]
3004 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3005 ugoodcrr = ugood["check-and-repair-results"]
3006 self.failUnlessEqual(u0crr["repair-attempted"], False)
3007 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3009 usick = [u for u in units
3010 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3011 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3012 usickcrr = usick["check-and-repair-results"]
3013 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3014 self.failUnlessEqual(usickcrr["repair-successful"], True)
3015 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3016 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3019 self.failUnlessEqual(stats["type"], "stats")
3021 self.failUnlessEqual(s["count-immutable-files"], 2)
3022 self.failUnlessEqual(s["count-literal-files"], 1)
3023 self.failUnlessEqual(s["count-directories"], 1)
3024 d.addCallback(_done)
3026 d.addErrback(self.explain_web_error)
    def _count_leases(self, ignored, which):
        """Collect (sharefile-path, lease-count) pairs for every share of
        the stashed URI named *which*.  The leading 'ignored' parameter
        lets this method sit directly in a Deferred callback chain.
        NOTE(review): the 'lease_counts' accumulator is initialized (and
        presumably returned) on lines missing from this view — confirm."""
        u = self.uris[which]
        shares = self.find_shares(u)
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            # count the leases currently attached to this share file
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
3039 def _assert_leasecount(self, lease_counts, expected):
3040 for (fn, num_leases) in lease_counts:
3041 if num_leases != expected:
3042 self.fail("expected %d leases, have %d, on %s" %
3043 (expected, num_leases, fn))
    def test_add_lease(self):
        """Verify t=check&add-lease=true semantics: a check from the
        original client renews its existing lease (count stays 1), while
        a check from a second client adds a new lease (count becomes 2),
        for both immutable and mutable files.
        NOTE(review): this view of the source is missing lines (e.g. the
        clientnum=1 continuation of the final mutable CHECK)."""
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            # remember each immutable upload's URI by symbolic name
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            # mutable files hand back a node, not an upload-results object
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
        d.addCallback(_stash_mutable_uri, "mutable")
        def _compute_fileurls(ignored):
            # build a webapi URL for each stashed URI
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        # baseline: each file starts with exactly one lease per share
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)

        # a plain check must not change any lease counts
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # same-client add-lease on the mutable file: renew only
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # alternate-client add-lease on the mutable file: second lease
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
        d.addCallback(_got_html_good)

        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
    def test_deep_add_lease(self):
        """Verify t=stream-deep-check&add-lease=true over a small tree:
        same-client add-lease only renews (counts stay 1), while the
        alternate client's add-lease creates a second lease on every node.
        NOTE(review): this view of the source is missing lines (e.g. the
        'def _done(res):' header and the clientnum=1 continuation)."""
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.create_empty_dirnode()
        def _stash_root_and_create_file(n):
            # remember the root's cap and URL, then add an immutable child
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"one", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            # record each child's cap under a symbolic name
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")

        d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")

        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
            units = [simplejson.loads(line)
                     for line in res.splitlines()
            # root, one, small, mutable, stats
            self.failUnlessEqual(len(units), 4+1)
        d.addCallback(_done)

        # baseline: every node has exactly one lease
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # same client: add-lease merely renews the existing leases
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)

        # alternate client: add-lease creates a second lease everywhere
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
        d.addCallback(_done)

        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)

        d.addErrback(self.explain_web_error)
    def test_exceptions(self):
        """Verify the webapi's error reporting: NotEnoughSharesError on
        unrecoverable immutable files (410 Gone, text/plain), 404 for a
        missing child, degraded-but-rendered HTML for unrecoverable
        directories, UnrecoverableFileError for their ?t=json form, and
        500 stack traces (HTML or text/plain depending on Accept) for an
        arbitrary internal exception via the ErrorBoom resource.
        NOTE(review): this view of the source is missing lines (e.g. the
        'def _stash_root(n):' / 'def _stash_bad(ur):' headers and the
        'u = ...' assignments inside the _mangle_dirnode_* helpers)."""
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        d = c0.create_empty_dirnode()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
            # keep only share 0 -> file found but unrecoverable (needs 3)
            self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
            self.delete_shares_numbered(ur.uri, range(1,10))
            # flip a bit in the read key to build a URI with zero shares
            u = uri.from_string(ur.uri)
            u.key = testutil.flip_bit(u.key, 0)
            baduri = u.to_string()
            self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_empty_dirnode())
        def _mangle_dirnode_1share(n):
            # directory with only one surviving share: unrecoverable
            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-1share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(1,10))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_empty_dirnode())
        def _mangle_dirnode_0share(n):
            # directory with no surviving shares at all
            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-0share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(0,10))
        d.addCallback(_mangle_dirnode_0share)

        # NotEnoughSharesError should be reported sensibly, with a
        # text/plain explanation of the problem, and perhaps some
        # information on which shares *could* be found.
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET unrecoverable",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["0shares"]))
        def _check_zero_shares(body):
            self.failIf("<html>" in body, body)
            # normalize whitespace before comparing against the canned text
            body = " ".join(body.strip().split())
            exp = ("NotEnoughSharesError: no shares could be found. "
                   "Zero shares usually indicates a corrupt URI, or that "
                   "no servers were connected, but it might also indicate "
                   "severe corruption. You should perform a filecheck on "
                   "this object to learn more.")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_zero_shares)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET 1share",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["1share"]))
        def _check_one_share(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NotEnoughSharesError: 1 share found, but we need "
                   "3 to recover the file. This indicates that some "
                   "servers were unavailable, or that shares have been "
                   "lost to server departure, hard drive failure, or disk "
                   "corruption. You should perform a filecheck on "
                   "this object to learn more.")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_one_share)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        def _missing_child(body):
            self.failUnless("No such child: imaginary" in body, body)
        d.addCallback(_missing_child)

        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            # an unrecoverable directory still renders as HTML
            self.failUnless("<html>" in body, body)
            # we should see the regular page, but without the child table or
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_0shares_dir_html)

        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.failUnless("<html>" in body, body)
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_1shares_dir_html)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            # the JSON form gets a plain-text error, not an HTML page
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessEqual(exp, body)
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))

        # attach a webapi child that throws a random error, to test how it
        w = c0.getServiceNamed("webish")
        w.root.putChild("ERRORBOOM", ErrorBoom())

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html(body):
            # test that a weird exception during a webapi operation with
            # Accept:*/* results in a text/html stack trace, while one
            # without that Accept: line gets us a text/plain stack trace
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html)

        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["text/plain"]}))
        def _internal_error_text(body):
            # test that a weird exception during a webapi operation with
            # Accept:*/* results in a text/html stack trace, while one
            # without that Accept: line gets us a text/plain stack trace
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text)

        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            self.flushLoggedErrors(CompletelyUnhandledError)
        d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Marker exception raised by ErrorBoom so tests can exercise the
    webapi's internal-error rendering and then flush it from the logs."""
class ErrorBoom(rend.Page):
    """A web resource that always blows up before rendering, used by
    test_exceptions to provoke a 500 Internal Server Error."""
    def beforeRender(self, ctx):
        # Fail before any rendering takes place so the server's
        # top-level error handler must deal with it.
        boom = CompletelyUnhandledError("whoops")
        raise boom