1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.web import status, common
15 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
16 from allmydata.util import fileutil, base32
17 from allmydata.util.assertutil import precondition
18 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
19 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
20 from allmydata.interfaces import IURI, INewDirectoryURI, \
21 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
22 from allmydata.mutable import servermap, publish, retrieve
23 import common_util as testutil
24 from allmydata.test.no_network import GridTestMixin
26 from allmydata.test.common_web import HTTPClientGETFactory, \
# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

# Per-test timeout (seconds) honored by Twisted Trial for every TestCase below.
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
class FakeStatsProvider:
    # Minimal stand-in for the node's stats provider: empty stats/counters.
    # NOTE(review): at least one line is missing from this excerpt between the
    # class header and this dict (probably a get_stats() method header plus a
    # return) -- confirm against the full file.
    stats = {'stats': {}, 'counters': {}}
class FakeClient(service.MultiService):
    # In-process stand-in for a Tahoe client node: canned identity, canned
    # status lists, and fake file/dirnode factories for the web tests.
    # NOTE(review): several lines are missing from this excerpt (the rest of
    # get_versions()'s dict, the body of connected_to_introducer(), parts of
    # create_empty_dirnode(), upload() and list_all_helper_statuses()) --
    # confirm against the full file.
    nodeid = "fake_nodeid"
    nickname = "fake_nickname"
    basedir = "fake_basedir"
    def get_versions(self):
        # fake version map (continuation/closing lines not shown here)
        return {'allmydata': "fake",
    introducer_furl = "None"
    # one pre-made status object of each kind, so the /status pages have
    # something to render
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    convergence = "some random string"
    stats_provider = FakeStatsProvider()
    def connected_to_introducer(self):
    storage_broker = StorageFarmBroker(None, permute_peers=True)
    def get_storage_broker(self):
        return self.storage_broker

    def create_node_from_uri(self, auri):
        # map a cap string to the matching fake node class
        precondition(isinstance(auri, str), auri)
        u = uri.from_string(auri)
        if (INewDirectoryURI.providedBy(u)
            or IReadonlyNewDirectoryURI.providedBy(u)):
            return FakeDirectoryNode(self).init_from_uri(u)
        if IFileURI.providedBy(u):
            return FakeCHKFileNode(u, self)
        assert IMutableFileURI.providedBy(u), u
        return FakeMutableFileNode(self).init_from_uri(u)

    def create_empty_dirnode(self):
        n = FakeDirectoryNode(self)
        d.addCallback(lambda res: n)

    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
    def create_mutable_file(self, contents=""):
        n = FakeMutableFileNode(self)
        return n.create(contents)

    def upload(self, uploadable):
        # fake upload: read the whole uploadable and stuff it into a fake
        # CHK filenode, returning an UploadResults with its cap
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
            n = create_chk_filenode(self, data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)

    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
class WebMixin(object):
    # Test fixture: starts a FakeClient plus a WebishServer on an ephemeral
    # port, then builds a small directory tree for the tests to poke at.
    # NOTE(review): the "def setUp(self):" header, the header of the
    # DeferredList callback, and several other interior lines are missing
    # from this excerpt -- confirm structure against the full file.
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        s.setServiceParent(self.s)
        # ask the listener which ephemeral port it actually bound
        self.webish_port = port = s.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port

        l = [ self.s.create_empty_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]

            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()

            foo.set_uri(u"empty", res[3][1].get_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)

            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri)

            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri)

            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file)

            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]

            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file)

            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"

            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
188 def makefile(self, number):
189 contents = "contents of file %s\n" % number
190 n = create_chk_filenode(self.s, contents)
191 return contents, n, n.get_uri()
    # NOTE(review): the "def tearDown(self):" header is missing from this
    # excerpt; this return shuts the fake client service back down.
        return self.s.stopService()

    def failUnlessIsBarDotTxt(self, res):
        # assert that the downloaded body matches bar.txt's known contents
        self.failUnlessEqual(res, self.BAR_CONTENTS, res)
199 def failUnlessIsBarJSON(self, res):
200 data = simplejson.loads(res)
201 self.failUnless(isinstance(data, list))
202 self.failUnlessEqual(data[0], u"filenode")
203 self.failUnless(isinstance(data[1], dict))
204 self.failIf(data[1]["mutable"])
205 self.failIf("rw_uri" in data[1]) # immutable
206 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
207 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
208 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # assert that res is the t=json rendering of the mutable 'foo' dirnode
        # NOTE(review): the "for (name, value)" line of the kids-dict
        # construction and the closing argument of the final assertion are
        # missing from this excerpt -- confirm against the full file.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)

        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        kids = dict( [(unicode(name),value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
    def GET(self, urlpath, followRedirect=False, return_response=False,
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        # NOTE(review): the closing "**kwargs):" line of the signature and the
        # "def _got_data(...):" header are missing from this excerpt.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred

    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        # NOTE(review): unlike GET/PUT/DELETE, this hands the bare urlpath
        # (without the self.webish_url prefix) to the factory -- looks
        # inconsistent; confirm against the full file whether a prefixing
        # line is merely missing from this excerpt.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
            return (data, factory.status, factory.response_headers)
        d.addCallback(_got_data)
        return factory.deferred
270 def PUT(self, urlpath, data, **kwargs):
271 url = self.webish_url + urlpath
272 return client.getPage(url, method="PUT", postdata=data, **kwargs)
274 def DELETE(self, urlpath):
275 url = self.webish_url + urlpath
276 return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Assemble a multipart/form-data body from **fields and POST it.
        # NOTE(review): several assembly lines are missing from this excerpt
        # (the form-list initialization, boundary separators, the _charset
        # value, the "else:" for non-file fields, and the field bodies) --
        # the control flow below is therefore approximate.
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                # (filename, content) tuples become file-upload fields
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
            form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            assert isinstance(value, str)
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # addBoth-style checker: res must be a Failure wrapping
        # expected_failure, optionally containing the given substrings.
        # NOTE(review): the "if substring:" and "else:" lines are missing
        # from this excerpt, so the indentation below is approximate.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            self.failUnless(substring in str(res),
                            "substring '%s' not in '%s'"
                            % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
        self.fail("%s was supposed to raise %s, not get '%s'" %
                  (which, expected_failure, res))

    def shouldFail2(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
        # like shouldFail, but invokes the callable itself and checks the
        # resulting deferred.
        # NOTE(review): the response_substring parameter line, the checker
        # header, and some "if"/"else" lines are missing from this excerpt.
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                self.failUnless(substring in str(res),
                                "%s: substring '%s' not in '%s'"
                                % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                       response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))

    def should404(self, res, which):
        # addBoth-style checker: res must be a web error with status "404"
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
        self.fail("%s was supposed to Error(404), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    def test_create(self):
    def test_welcome(self):
        # the root page should render the welcome text
        # NOTE(review): test_create's body, the GET("/") call, and the _check
        # header are missing from this excerpt.
            self.failUnless('Welcome To AllMyData' in res)
            self.failUnless('Tahoe' in res)
        self.s.basedir = 'web/test_welcome'
        fileutil.make_dirs("web/test_welcome")
        fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
    def test_provisioning(self):
        # drive the /provisioning form through several parameter sets
        # NOTE(review): the _check* callback headers and some field lines are
        # missing from this excerpt.
        d = self.GET("/provisioning/")
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)

            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
    def test_reliability_tool(self):
        # exercises /reliability; skipped when NumPy is unavailable
        # NOTE(review): the try/except around the import, the _check headers,
        # and several field lines are missing from this excerpt.
        from allmydata import reliability
        _hush_pyflakes = reliability
            raise unittest.SkipTest("reliability tool requires NumPy")

        d = self.GET("/reliability/")
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      "check_period": "1M",
                      "report_period": "3M",
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
    def test_status(self):
        # exercise the /status pages for each kind of operation counter
        # NOTE(review): a few callback headers (_check, _check_dl, _check_ul)
        # and the trailing "return d" are missing from this excerpt.
        dl_num = self.s.list_all_download_statuses()[0].get_counter()
        ul_num = self.s.list_all_upload_statuses()[0].get_counter()
        mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = self.s.list_all_publish_statuses()[0].get_counter()
        ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)

        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
503 def test_status_numbers(self):
504 drrm = status.DownloadResultsRendererMixin()
505 self.failUnlessEqual(drrm.render_time(None, None), "")
506 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
507 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
508 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
509 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
510 self.failUnlessEqual(drrm.render_rate(None, None), "")
511 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
512 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
513 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
515 urrm = status.UploadResultsRendererMixin()
516 self.failUnlessEqual(urrm.render_time(None, None), "")
517 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
518 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
519 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
520 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
521 self.failUnlessEqual(urrm.render_rate(None, None), "")
522 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
523 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
524 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        # plain GET of an immutable file returns its contents
        # NOTE(review): in this and the following methods the trailing
        # "d.addCallback(_got)" / "return d" lines are missing from this
        # excerpt.
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_range(self):
        # single byte-range GET => 206 plus a content-range header
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])

    def test_GET_FILEURL_partial_range(self):
        # open-ended range "bytes=5-" runs to end-of-file
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])

    def test_HEAD_FILEURL_range(self):
        # HEAD with a range: same headers as GET, but an empty body
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))

    def test_HEAD_FILEURL_partial_range(self):
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))

    def test_GET_FILEURL_range_bad(self):
        # a malformed Range: header should produce an HTTP error
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",

    def test_HEAD_FILEURL(self):
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
    def test_GET_FILEURL_named(self):
        # /file/$CAP/@@name=/name and /named/$CAP/name both serve the file
        # NOTE(review): trailing "return d" lines and some error-text
        # arguments are missing from this and the following methods in this
        # excerpt.
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers

    def test_PUT_FILEURL_named_bad(self):
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")

    def test_GET_DIRURL_named_bad(self):
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             self.GET, base + "/@@name=/blah.txt")

    def test_GET_slash_file_bad(self):
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             "/file must be followed by a file-cap and a name",
    def test_GET_unhandled_URI_named(self):
        # verify-caps are not retrievable via /file/
        # NOTE(review): the GET call argument lines and "return d" are
        # missing from this and the following methods in this excerpt.
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "is not a valid file- or directory- cap",

    def test_GET_unhandled_URI(self):
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             "is not a valid file- or directory- cap",

    def test_GET_FILE_URI(self):
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,

    def test_PUT_FILE_URI_badchild(self):
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        # NOTE(review): the 'which' label below says test_GET_... inside the
        # PUT test -- looks like a copy/paste slip worth fixing.
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
    def test_GET_FILEURL_save(self):
        # save=true should still deliver the file body
        # NOTE(review): trailing "return d" lines are missing from this
        # excerpt.
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        d.addCallback(self.failUnlessIsBarDotTxt)

    def test_GET_FILEURL_missing(self):
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
    def test_PUT_NEWFILEURL(self):
        # PUT to a fresh name creates an immutable file child
        # NOTE(review): "return d" lines, some callback headers, and a few
        # continuation lines are missing from this and the following methods
        # in this excerpt.
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_not_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_range_bad(self):
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))

    def test_PUT_NEWFILEURL_mutable(self):
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_mutable_toobig(self):
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))

    def test_PUT_NEWFILEURL_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",

    def test_PUT_NEWFILEURL_no_replace(self):
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  "There was already a child by that name, and you asked me "

    def test_PUT_NEWFILEURL_mkdirs(self):
        # intermediate directories should be created on demand
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))

    def test_PUT_NEWFILEURL_blocked(self):
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  "Unable to create directory 'blockingfile': a file was in the way")
    def test_DELETE_FILEURL(self):
        # deleting an existing child removes it from the parent dirnode
        # NOTE(review): trailing "return d" lines are missing from this
        # excerpt.
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))

    def test_DELETE_FILEURL_missing(self):
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")

    def test_DELETE_FILEURL_missing2(self):
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        # NOTE(review): trailing "return d" lines are missing from this and
        # the following methods in this excerpt.
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)

    def test_GET_FILEURL_json_missing(self):
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")

    def test_GET_FILEURL_uri(self):
        # t=uri and t=readonly-uri both return the file's cap
        # NOTE(review): the _check/_check2 callback headers are missing from
        # this excerpt.
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)

    def test_GET_FILEURL_badtype(self):
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 self.public_url + "/foo/bar.txt?t=bogus")

    def test_GET_FILEURL_uri_missing(self):
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
    def test_GET_DIRURL(self):
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        # NOTE(review): the ROOT assignment, several regex fragments, the
        # _check* callback headers, and the for/else structure around the
        # delete-button scan are missing from this excerpt; indentation below
        # is approximate.
        d = self.GET(self.public_url + "/foo", followRedirect=True)
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                self.fail("unable to find delete-bar.txt line", res)

            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td>DIR</td>')
                       +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)

        # look at a directory which is readonly
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
            self.failUnless("(read-only)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)

        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
            self.failUnless(re.search('<td>DIR-RO</td>'
                                      r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)

        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
            self.failUnless("directory is empty" in res, res)
            MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
            self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)

    def test_GET_DIRURL_badtype(self):
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 self.public_url + "/foo?t=bogus")

    def test_GET_DIRURL_json(self):
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
945 def test_POST_DIRURL_manifest_no_ophandle(self):
946 d = self.shouldFail2(error.Error,
947 "test_POST_DIRURL_manifest_no_ophandle",
949 "slow operation requires ophandle=",
950 self.POST, self.public_url, t="start-manifest")
# Start a manifest operation under ophandle=125, poll it to completion, and
# check the results in HTML, text, and JSON renderings.
# NOTE(review): the listing elides some lines here (e.g. the def line for
# _got_json and the followRedirect argument); code kept verbatim.
953 def test_POST_DIRURL_manifest(self):
954 d = defer.succeed(None)
955 def getman(ignored, output):
956 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
958 d.addCallback(self.wait_for_operation, "125")
959 d.addCallback(self.get_operation_results, "125", output)
961 d.addCallback(getman, None)
962 def _got_html(manifest):
963 self.failUnless("Manifest of SI=" in manifest)
964 self.failUnless("<td>sub</td>" in manifest)
965 self.failUnless(self._sub_uri in manifest)
966 self.failUnless("<td>sub/baz.txt</td>" in manifest)
967 d.addCallback(_got_html)
969 # both t=status and unadorned GET should be identical
970 d.addCallback(lambda res: self.GET("/operations/125"))
971 d.addCallback(_got_html)
973 d.addCallback(getman, "html")
974 d.addCallback(_got_html)
975 d.addCallback(getman, "text")
976 def _got_text(manifest):
977 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
978 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
979 d.addCallback(_got_text)
980 d.addCallback(getman, "JSON")
982 data = res["manifest"]
984 for (path_list, cap) in data:
985 got[tuple(path_list)] = cap
986 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
987 self.failUnless((u"sub",u"baz.txt") in got)
988 self.failUnless("finished" in res)
989 self.failUnless("origin" in res)
990 self.failUnless("storage-index" in res)
991 self.failUnless("verifycaps" in res)
992 self.failUnless("stats" in res)
993 d.addCallback(_got_json)
# t=start-deep-size without an ophandle= must be rejected.
996 def test_POST_DIRURL_deepsize_no_ophandle(self):
997 d = self.shouldFail2(error.Error,
998 "test_POST_DIRURL_deepsize_no_ophandle",
1000 "slow operation requires ophandle=",
1001 self.POST, self.public_url, t="start-deep-size")
# Deep-size operation under ophandle=126: JSON results must report a size
# over 1000 bytes, and the text rendering must contain a matching "size:" line.
# NOTE(review): some lines elided by this listing (e.g. the def line for
# _got_text); code kept verbatim.
1004 def test_POST_DIRURL_deepsize(self):
1005 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1006 followRedirect=True)
1007 d.addCallback(self.wait_for_operation, "126")
1008 d.addCallback(self.get_operation_results, "126", "json")
1009 def _got_json(data):
1010 self.failUnlessEqual(data["finished"], True)
1012 self.failUnless(size > 1000)
1013 d.addCallback(_got_json)
1014 d.addCallback(self.get_operation_results, "126", "text")
1016 mo = re.search(r'^size: (\d+)$', res, re.M)
1017 self.failUnless(mo, res)
1018 size = int(mo.group(1))
1019 # with directories, the size varies.
1020 self.failUnless(size > 1000)
1021 d.addCallback(_got_text)
# t=start-deep-stats without an ophandle= must be rejected.
1024 def test_POST_DIRURL_deepstats_no_ophandle(self):
1025 d = self.shouldFail2(error.Error,
1026 "test_POST_DIRURL_deepstats_no_ophandle",
1028 "slow operation requires ophandle=",
1029 self.POST, self.public_url, t="start-deep-stats")
# Deep-stats under ophandle=127: the JSON results must match the known
# contents of the test directory tree (3 immutable files, 3 directories, ...).
1032 def test_POST_DIRURL_deepstats(self):
1033 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1034 followRedirect=True)
1035 d.addCallback(self.wait_for_operation, "127")
1036 d.addCallback(self.get_operation_results, "127", "json")
1037 def _got_json(stats):
1038 expected = {"count-immutable-files": 3,
1039 "count-mutable-files": 0,
1040 "count-literal-files": 0,
1042 "count-directories": 3,
1043 "size-immutable-files": 57,
1044 "size-literal-files": 0,
1045 #"size-directories": 1912, # varies
1046 #"largest-directory": 1590,
1047 "largest-directory-children": 5,
1048 "largest-immutable-file": 19,
1050 for k,v in expected.iteritems():
1051 self.failUnlessEqual(stats[k], v,
1052 "stats[%s] was %s, not %s" %
1054 self.failUnlessEqual(stats["size-files-histogram"],
1056 d.addCallback(_got_json)
# t=stream-manifest returns newline-separated JSON units: one per object
# (7 total) with a final "stats" unit; spot-check the root dir and baz.txt.
1059 def test_POST_DIRURL_stream_manifest(self):
1060 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1062 self.failUnless(res.endswith("\n"))
1063 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1064 self.failUnlessEqual(len(units), 7)
1065 self.failUnlessEqual(units[-1]["type"], "stats")
1067 self.failUnlessEqual(first["path"], [])
1068 self.failUnlessEqual(first["cap"], self._foo_uri)
1069 self.failUnlessEqual(first["type"], "directory")
1070 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1071 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1072 self.failIfEqual(baz["storage-index"], None)
1073 self.failIfEqual(baz["verifycap"], None)
1074 self.failIfEqual(baz["repaircap"], None)
1076 d.addCallback(_check)
# GET ?t=uri on a directory returns its read-write URI.
1079 def test_GET_DIRURL_uri(self):
1080 d = self.GET(self.public_url + "/foo?t=uri")
1082 self.failUnlessEqual(res, self._foo_uri)
1083 d.addCallback(_check)
# GET ?t=readonly-uri on a directory returns its read-only URI.
1086 def test_GET_DIRURL_readonly_uri(self):
1087 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1089 self.failUnlessEqual(res, self._foo_readonly_uri)
1090 d.addCallback(_check)
# PUT ?t=mkdir creates an empty directory as a new child of /foo.
1093 def test_PUT_NEWDIRURL(self):
1094 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1095 d.addCallback(lambda res:
1096 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1097 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1098 d.addCallback(self.failUnlessNodeKeysAre, [])
# PUT ?t=mkdir on an existing directory leaves its contents intact.
1101 def test_PUT_NEWDIRURL_exists(self):
1102 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1103 d.addCallback(lambda res:
1104 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1105 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1106 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir through a path that crosses an existing *file* must 409 and leave
# the tree unchanged.
1109 def test_PUT_NEWDIRURL_blocked(self):
1110 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1111 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1113 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1114 d.addCallback(lambda res:
1115 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1116 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1117 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p&path=... creates intermediate directories and is idempotent:
# a second call returns the same URI as the existing node.
# NOTE(review): some lines elided by this listing; code kept verbatim.
1120 def test_PUT_NEWDIRURL_mkdir_p(self):
1121 d = defer.succeed(None)
1122 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1123 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1124 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1125 def mkdir_p(mkpnode):
1126 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1128 def made_subsub(ssuri):
1129 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1130 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1132 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1134 d.addCallback(made_subsub)
1136 d.addCallback(mkdir_p)
# PUT ?t=mkdir with a multi-level path creates the intermediate directory
# and puts newdir inside it, not directly under /foo.
1139 def test_PUT_NEWDIRURL_mkdirs(self):
1140 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1141 d.addCallback(lambda res:
1142 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1143 d.addCallback(lambda res:
1144 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1145 d.addCallback(lambda res:
1146 self._foo_node.get_child_at_path(u"subdir/newdir"))
1147 d.addCallback(self.failUnlessNodeKeysAre, [])
# DELETE on a directory unlinks it from its parent.
1150 def test_DELETE_DIRURL(self):
1151 d = self.DELETE(self.public_url + "/foo")
1152 d.addCallback(lambda res:
1153 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent child 404s and leaves the parent untouched.
1156 def test_DELETE_DIRURL_missing(self):
1157 d = self.DELETE(self.public_url + "/foo/missing")
1158 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1159 d.addCallback(lambda res:
1160 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent top-level path 404s.
1163 def test_DELETE_DIRURL_missing2(self):
1164 d = self.DELETE(self.public_url + "/missing")
1165 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debug helper: walk the public root with DirnodeWalkerMixin, visiting
# every child. NOTE(review): the visitor body and return are elided by this
# listing; code kept verbatim.
1168 def dump_root(self):
1170 w = webish.DirnodeWalkerMixin()
1171 def visitor(childpath, childnode, metadata):
1173 d = w.walk(self.public_root, visitor)
# Assert that `node` has exactly the given (unicode) child names.
# NOTE(review): the line that creates `d` (presumably node.list()) is elided
# by this listing; code kept verbatim.
1176 def failUnlessNodeKeysAre(self, node, expected_keys):
1177 for k in expected_keys:
1178 assert isinstance(k, unicode)
1180 def _check(children):
1181 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1182 d.addCallback(_check)
# Assert that `node` has a child named `name` (unicode).
1184 def failUnlessNodeHasChild(self, node, name):
1185 assert isinstance(name, unicode)
1187 def _check(children):
1188 self.failUnless(name in children)
1189 d.addCallback(_check)
# Assert that `node` has NO child named `name` (unicode).
1191 def failIfNodeHasChild(self, node, name):
1192 assert isinstance(name, unicode)
1194 def _check(children):
1195 self.failIf(name in children)
1196 d.addCallback(_check)
# Download the (immutable) child at `name` and assert its bytes equal
# expected_contents.
1199 def failUnlessChildContentsAre(self, node, name, expected_contents):
1200 assert isinstance(name, unicode)
1201 d = node.get_child_at_path(name)
1202 d.addCallback(lambda node: node.download_to_data())
1203 def _check(contents):
1204 self.failUnlessEqual(contents, expected_contents)
1205 d.addCallback(_check)
# Same as failUnlessChildContentsAre but for mutable files: compares the
# best recoverable version.
1208 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1209 assert isinstance(name, unicode)
1210 d = node.get_child_at_path(name)
1211 d.addCallback(lambda node: node.download_best_version())
1212 def _check(contents):
1213 self.failUnlessEqual(contents, expected_contents)
1214 d.addCallback(_check)
# Assert the child at `name` has URI expected_uri (whitespace-stripped).
1217 def failUnlessChildURIIs(self, node, name, expected_uri):
1218 assert isinstance(name, unicode)
1219 d = node.get_child_at_path(name)
1221 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1222 d.addCallback(_check)
# Inverse of failUnlessChildURIIs: assert that the URI returned by the web
# API (got_uri) matches the URI of the child at `name`.
1225 def failUnlessURIMatchesChild(self, got_uri, node, name):
1226 assert isinstance(name, unicode)
1227 d = node.get_child_at_path(name)
1229 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1230 d.addCallback(_check)
1233 def failUnlessCHKURIHasContents(self, got_uri, contents):
1234 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
# POST t=upload into /foo: the returned URI matches the new child, and the
# child's contents round-trip.
1236 def test_POST_upload(self):
1237 d = self.POST(self.public_url + "/foo", t="upload",
1238 file=("new.txt", self.NEWFILE_CONTENTS))
1240 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1241 d.addCallback(lambda res:
1242 self.failUnlessChildContentsAre(fn, u"new.txt",
1243 self.NEWFILE_CONTENTS))
# Upload with a non-ASCII filename; the file must be retrievable via the
# UTF-8-encoded URL.
1246 def test_POST_upload_unicode(self):
1247 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1248 d = self.POST(self.public_url + "/foo", t="upload",
1249 file=(filename, self.NEWFILE_CONTENTS))
1251 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1252 d.addCallback(lambda res:
1253 self.failUnlessChildContentsAre(fn, filename,
1254 self.NEWFILE_CONTENTS))
1255 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1256 d.addCallback(lambda res: self.GET(target_url))
1257 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1258 self.NEWFILE_CONTENTS,
# Like test_POST_upload_unicode, but the name= field (elided in this listing)
# overrides the filename supplied in the file= part ("overridden").
1262 def test_POST_upload_unicode_named(self):
1263 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1264 d = self.POST(self.public_url + "/foo", t="upload",
1266 file=("overridden", self.NEWFILE_CONTENTS))
1268 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1269 d.addCallback(lambda res:
1270 self.failUnlessChildContentsAre(fn, filename,
1271 self.NEWFILE_CONTENTS))
1272 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1273 d.addCallback(lambda res: self.GET(target_url))
1274 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1275 self.NEWFILE_CONTENTS,
# POST t=upload to /uri (no parent directory): response is an upload-results
# page; scrape the URI out of it and check the stored contents.
1279 def test_POST_upload_no_link(self):
1280 d = self.POST("/uri", t="upload",
1281 file=("new.txt", self.NEWFILE_CONTENTS))
1282 def _check_upload_results(page):
1283 # this should be a page which describes the results of the upload
1284 # that just finished.
1285 self.failUnless("Upload Results:" in page)
1286 self.failUnless("URI:" in page)
1287 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1288 mo = uri_re.search(page)
1289 self.failUnless(mo, page)
1290 new_uri = mo.group(1)
1292 d.addCallback(_check_upload_results)
1293 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done=/ redirects to the root after an unlinked upload.
1296 def test_POST_upload_no_link_whendone(self):
1297 d = self.POST("/uri", t="upload", when_done="/",
1298 file=("new.txt", self.NEWFILE_CONTENTS))
1299 d.addBoth(self.shouldRedirect, "/")
# Helper: run `callable`, expect it to fail with PageRedirect, and hand the
# status code and Location target to `checker`; fail the test if the call
# returned normally instead of redirecting.
# NOTE(review): the listing elides the callback def line and the end of the
# fail() message; code kept verbatim.
1302 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1303 d = defer.maybeDeferred(callable, *args, **kwargs)
1305 if isinstance(res, failure.Failure):
1306 res.trap(error.PageRedirect)
1307 statuscode = res.value.status
1308 target = res.value.location
1309 return checker(statuscode, target)
1310 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done=/uri/%(uri)s must redirect (302 FOUND) to the file's own URL;
# fetching the redirect target returns the uploaded bytes.
1315 def test_POST_upload_no_link_whendone_results(self):
1316 def check(statuscode, target):
1317 self.failUnlessEqual(statuscode, str(http.FOUND))
1318 self.failUnless(target.startswith(self.webish_url), target)
1319 return client.getPage(target, method="GET")
1320 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1322 self.POST, "/uri", t="upload",
1323 when_done="/uri/%(uri)s",
1324 file=("new.txt", self.NEWFILE_CONTENTS))
1325 d.addCallback(lambda res:
1326 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
# Unlinked mutable upload: the returned URI is a mutable-file URI, the fake
# node table holds it, and the contents are readable via /uri/ and /file/.
# NOTE(review): the def lines for _check2/_check3/_check4 are elided by this
# listing; code kept verbatim.
1329 def test_POST_upload_no_link_mutable(self):
1330 d = self.POST("/uri", t="upload", mutable="true",
1331 file=("new.txt", self.NEWFILE_CONTENTS))
1332 def _check(new_uri):
1333 new_uri = new_uri.strip()
1334 self.new_uri = new_uri
1336 self.failUnless(IMutableFileURI.providedBy(u))
1337 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
1338 n = self.s.create_node_from_uri(new_uri)
1339 return n.download_best_version()
1340 d.addCallback(_check)
1342 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1343 return self.GET("/uri/%s" % urllib.quote(self.new_uri))
1344 d.addCallback(_check2)
1346 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1347 return self.GET("/file/%s" % urllib.quote(self.new_uri))
1348 d.addCallback(_check3)
1350 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1351 d.addCallback(_check4)
# An unlinked mutable upload over MUTABLE_SIZELIMIT must 413.
1354 def test_POST_upload_no_link_mutable_toobig(self):
1355 d = self.shouldFail2(error.Error,
1356 "test_POST_upload_no_link_mutable_toobig",
1357 "413 Request Entity Too Large",
1358 "SDMF is limited to one segment, and 10001 > 10000",
1360 "/uri", t="upload", mutable="true",
1362 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# End-to-end mutable-file exercise through the web API: create via POST,
# overwrite via POST and PUT (URI must stay stable), check the HTML and JSON
# directory/file renderings, t=uri / t=readonly-uri, /uri/<cap> access, HEAD
# size headers, and the 413 error on oversized overwrites.
# NOTE(review): several lines (callback def lines, some arguments) are elided
# by this numbered listing; code kept verbatim.
1365 def test_POST_upload_mutable(self):
1366 # this creates a mutable file
1367 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1368 file=("new.txt", self.NEWFILE_CONTENTS))
1370 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1371 d.addCallback(lambda res:
1372 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1373 self.NEWFILE_CONTENTS))
1374 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1376 self.failUnless(IMutableFileNode.providedBy(newnode))
1377 self.failUnless(newnode.is_mutable())
1378 self.failIf(newnode.is_readonly())
1379 self._mutable_node = newnode
1380 self._mutable_uri = newnode.get_uri()
1383 # now upload it again and make sure that the URI doesn't change
1384 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1385 d.addCallback(lambda res:
1386 self.POST(self.public_url + "/foo", t="upload",
1388 file=("new.txt", NEWER_CONTENTS)))
1389 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1390 d.addCallback(lambda res:
1391 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1393 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1395 self.failUnless(IMutableFileNode.providedBy(newnode))
1396 self.failUnless(newnode.is_mutable())
1397 self.failIf(newnode.is_readonly())
1398 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1399 d.addCallback(_got2)
1401 # upload a second time, using PUT instead of POST
1402 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1403 d.addCallback(lambda res:
1404 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1405 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1406 d.addCallback(lambda res:
1407 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1410 # finally list the directory, since mutable files are displayed
1411 # slightly differently
1413 d.addCallback(lambda res:
1414 self.GET(self.public_url + "/foo/",
1415 followRedirect=True))
1416 def _check_page(res):
1417 # TODO: assert more about the contents
1418 self.failUnless("SSK" in res)
1420 d.addCallback(_check_page)
1422 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1424 self.failUnless(IMutableFileNode.providedBy(newnode))
1425 self.failUnless(newnode.is_mutable())
1426 self.failIf(newnode.is_readonly())
1427 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1428 d.addCallback(_got3)
1430 # look at the JSON form of the enclosing directory
1431 d.addCallback(lambda res:
1432 self.GET(self.public_url + "/foo/?t=json",
1433 followRedirect=True))
1434 def _check_page_json(res):
1435 parsed = simplejson.loads(res)
1436 self.failUnlessEqual(parsed[0], "dirnode")
1437 children = dict( [(unicode(name),value)
1439 in parsed[1]["children"].iteritems()] )
1440 self.failUnless("new.txt" in children)
1441 new_json = children["new.txt"]
1442 self.failUnlessEqual(new_json[0], "filenode")
1443 self.failUnless(new_json[1]["mutable"])
1444 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1445 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1446 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1447 d.addCallback(_check_page_json)
1449 # and the JSON form of the file
1450 d.addCallback(lambda res:
1451 self.GET(self.public_url + "/foo/new.txt?t=json"))
1452 def _check_file_json(res):
1453 parsed = simplejson.loads(res)
1454 self.failUnlessEqual(parsed[0], "filenode")
1455 self.failUnless(parsed[1]["mutable"])
1456 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1457 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1458 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1459 d.addCallback(_check_file_json)
1461 # and look at t=uri and t=readonly-uri
1462 d.addCallback(lambda res:
1463 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1464 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1465 d.addCallback(lambda res:
1466 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1467 def _check_ro_uri(res):
1468 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1469 self.failUnlessEqual(res, ro_uri)
1470 d.addCallback(_check_ro_uri)
1472 # make sure we can get to it from /uri/URI
1473 d.addCallback(lambda res:
1474 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1475 d.addCallback(lambda res:
1476 self.failUnlessEqual(res, NEW2_CONTENTS))
1478 # and that HEAD computes the size correctly
1479 d.addCallback(lambda res:
1480 self.HEAD(self.public_url + "/foo/new.txt",
1481 return_response=True))
1482 def _got_headers((res, status, headers)):
1483 self.failUnlessEqual(res, "")
1484 self.failUnlessEqual(headers["content-length"][0],
1485 str(len(NEW2_CONTENTS)))
1486 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1487 d.addCallback(_got_headers)
1489 # make sure that size errors are displayed correctly for overwrite
1490 d.addCallback(lambda res:
1491 self.shouldFail2(error.Error,
1492 "test_POST_upload_mutable-toobig",
1493 "413 Request Entity Too Large",
1494 "SDMF is limited to one segment, and 10001 > 10000",
1496 self.public_url + "/foo", t="upload",
1499 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1502 d.addErrback(self.dump_error)
1505 def test_POST_upload_mutable_toobig(self):
1506 d = self.shouldFail2(error.Error,
1507 "test_POST_upload_no_link_mutable_toobig",
1508 "413 Request Entity Too Large",
1509 "SDMF is limited to one segment, and 10001 > 10000",
1511 self.public_url + "/foo",
1512 t="upload", mutable="true",
1514 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# Errback helper: print the HTTP response body hiding inside a
# twisted.web.error.Error so trial's output is actually useful.
# NOTE(review): the trailing lines (separator print / `return f`) are elided
# by this listing; code kept verbatim.
1517 def dump_error(self, f):
1518 # if the web server returns an error code (like 400 Bad Request),
1519 # web.client.getPage puts the HTTP response body into the .response
1520 # attribute of the exception object that it gives back. It does not
1521 # appear in the Failure's repr(), so the ERROR that trial displays
1522 # will be rather terse and unhelpful. addErrback this method to the
1523 # end of your chain to get more information out of these errors.
1524 if f.check(error.Error):
1525 print "web.error.Error:"
1527 print f.value.response
# Uploading over an existing child (bar.txt) replaces its contents.
1530 def test_POST_upload_replace(self):
1531 d = self.POST(self.public_url + "/foo", t="upload",
1532 file=("bar.txt", self.NEWFILE_CONTENTS))
1534 d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
1535 d.addCallback(lambda res:
1536 self.failUnlessChildContentsAre(fn, u"bar.txt",
1537 self.NEWFILE_CONTENTS))
# replace=false is fine when the child name is new.
1540 def test_POST_upload_no_replace_ok(self):
1541 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1542 file=("new.txt", self.NEWFILE_CONTENTS))
1543 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1544 d.addCallback(lambda res: self.failUnlessEqual(res,
1545 self.NEWFILE_CONTENTS))
# replace=false as a query argument must refuse to overwrite bar.txt and
# leave the original contents intact.
1548 def test_POST_upload_no_replace_queryarg(self):
1549 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1550 file=("bar.txt", self.NEWFILE_CONTENTS))
1551 d.addBoth(self.shouldFail, error.Error,
1552 "POST_upload_no_replace_queryarg",
1554 "There was already a child by that name, and you asked me "
1555 "to not replace it")
1556 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1557 d.addCallback(self.failUnlessIsBarDotTxt)
# Same refusal when replace=false arrives as a form field instead of a
# query argument.
1560 def test_POST_upload_no_replace_field(self):
1561 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1562 file=("bar.txt", self.NEWFILE_CONTENTS))
1563 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1565 "There was already a child by that name, and you asked me "
1566 "to not replace it")
1567 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1568 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done=/THERE redirects after a linked upload; the child still lands.
1571 def test_POST_upload_whendone(self):
1572 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1573 file=("new.txt", self.NEWFILE_CONTENTS))
1574 d.addBoth(self.shouldRedirect, "/THERE")
1576 d.addCallback(lambda res:
1577 self.failUnlessChildContentsAre(fn, u"new.txt",
1578 self.NEWFILE_CONTENTS))
# Upload with an explicit name= field instead of a filename in the
# file= part.
1581 def test_POST_upload_named(self):
1583 d = self.POST(self.public_url + "/foo", t="upload",
1584 name="new.txt", file=self.NEWFILE_CONTENTS)
1585 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1586 d.addCallback(lambda res:
1587 self.failUnlessChildContentsAre(fn, u"new.txt",
1588 self.NEWFILE_CONTENTS))
# name= containing slashes is rejected, and the directory's child list is
# unchanged afterwards. NOTE(review): the tail of the expected-children list
# is elided by this listing; code kept verbatim.
1591 def test_POST_upload_named_badfilename(self):
1592 d = self.POST(self.public_url + "/foo", t="upload",
1593 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1594 d.addBoth(self.shouldFail, error.Error,
1595 "test_POST_upload_named_badfilename",
1597 "name= may not contain a slash",
1599 # make sure that nothing was added
1600 d.addCallback(lambda res:
1601 self.failUnlessNodeKeysAre(self._foo_node,
1602 [u"bar.txt", u"blockingfile",
1603 u"empty", u"n\u00fc.txt",
# t=check on a file: healthy in HTML, when_done redirect honored,
# return_to link present, and JSON output carries storage-index and
# results.healthy.
1607 def test_POST_FILEURL_check(self):
1608 bar_url = self.public_url + "/foo/bar.txt"
1609 d = self.POST(bar_url, t="check")
1611 self.failUnless("Healthy :" in res)
1612 d.addCallback(_check)
1613 redir_url = "http://allmydata.org/TARGET"
1614 def _check2(statuscode, target):
1615 self.failUnlessEqual(statuscode, str(http.FOUND))
1616 self.failUnlessEqual(target, redir_url)
1617 d.addCallback(lambda res:
1618 self.shouldRedirect2("test_POST_FILEURL_check",
1622 when_done=redir_url))
1623 d.addCallback(lambda res:
1624 self.POST(bar_url, t="check", return_to=redir_url))
1626 self.failUnless("Healthy :" in res)
1627 self.failUnless("Return to file" in res)
1628 self.failUnless(redir_url in res)
1629 d.addCallback(_check3)
1631 d.addCallback(lambda res:
1632 self.POST(bar_url, t="check", output="JSON"))
1633 def _check_json(res):
1634 data = simplejson.loads(res)
1635 self.failUnless("storage-index" in data)
1636 self.failUnless(data["results"]["healthy"])
1637 d.addCallback(_check_json)
# t=check&repair=true on a file: same health/redirect/return_to checks as
# plain t=check.
1641 def test_POST_FILEURL_check_and_repair(self):
1642 bar_url = self.public_url + "/foo/bar.txt"
1643 d = self.POST(bar_url, t="check", repair="true")
1645 self.failUnless("Healthy :" in res)
1646 d.addCallback(_check)
1647 redir_url = "http://allmydata.org/TARGET"
1648 def _check2(statuscode, target):
1649 self.failUnlessEqual(statuscode, str(http.FOUND))
1650 self.failUnlessEqual(target, redir_url)
1651 d.addCallback(lambda res:
1652 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1655 t="check", repair="true",
1656 when_done=redir_url))
1657 d.addCallback(lambda res:
1658 self.POST(bar_url, t="check", return_to=redir_url))
1660 self.failUnless("Healthy :" in res)
1661 self.failUnless("Return to file" in res)
1662 self.failUnless(redir_url in res)
1663 d.addCallback(_check3)
# t=check on a directory: healthy in HTML, when_done redirect, return_to
# link ("Return to file/directory"), and JSON results.
1666 def test_POST_DIRURL_check(self):
1667 foo_url = self.public_url + "/foo/"
1668 d = self.POST(foo_url, t="check")
1670 self.failUnless("Healthy :" in res, res)
1671 d.addCallback(_check)
1672 redir_url = "http://allmydata.org/TARGET"
1673 def _check2(statuscode, target):
1674 self.failUnlessEqual(statuscode, str(http.FOUND))
1675 self.failUnlessEqual(target, redir_url)
1676 d.addCallback(lambda res:
1677 self.shouldRedirect2("test_POST_DIRURL_check",
1681 when_done=redir_url))
1682 d.addCallback(lambda res:
1683 self.POST(foo_url, t="check", return_to=redir_url))
1685 self.failUnless("Healthy :" in res, res)
1686 self.failUnless("Return to file/directory" in res)
1687 self.failUnless(redir_url in res)
1688 d.addCallback(_check3)
1690 d.addCallback(lambda res:
1691 self.POST(foo_url, t="check", output="JSON"))
1692 def _check_json(res):
1693 data = simplejson.loads(res)
1694 self.failUnless("storage-index" in data)
1695 self.failUnless(data["results"]["healthy"])
1696 d.addCallback(_check_json)
# t=check&repair=true on a directory: same checks as test_POST_DIRURL_check
# minus the JSON pass.
1700 def test_POST_DIRURL_check_and_repair(self):
1701 foo_url = self.public_url + "/foo/"
1702 d = self.POST(foo_url, t="check", repair="true")
1704 self.failUnless("Healthy :" in res, res)
1705 d.addCallback(_check)
1706 redir_url = "http://allmydata.org/TARGET"
1707 def _check2(statuscode, target):
1708 self.failUnlessEqual(statuscode, str(http.FOUND))
1709 self.failUnlessEqual(target, redir_url)
1710 d.addCallback(lambda res:
1711 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1714 t="check", repair="true",
1715 when_done=redir_url))
1716 d.addCallback(lambda res:
1717 self.POST(foo_url, t="check", return_to=redir_url))
1719 self.failUnless("Healthy :" in res)
1720 self.failUnless("Return to file/directory" in res)
1721 self.failUnless(redir_url in res)
1722 d.addCallback(_check3)
# Poll /operations/<ophandle>?t=status&output=JSON until "finished" is
# true, stalling 1s between polls; used by the ophandle-based tests above.
# NOTE(review): the GET call and the finished-case return are elided by this
# listing; code kept verbatim.
1725 def wait_for_operation(self, ignored, ophandle):
1726 url = "/operations/" + ophandle
1727 url += "?t=status&output=JSON"
1730 data = simplejson.loads(res)
1731 if not data["finished"]:
1732 d = self.stall(delay=1.0)
1733 d.addCallback(self.wait_for_operation, ophandle)
# Fetch /operations/<ophandle> results; when output is "json"
# (case-insensitive) the body is parsed with simplejson, otherwise the raw
# text is returned. NOTE(review): the status-query and GET lines are elided
# by this listing; code kept verbatim.
1739 def get_operation_results(self, ignored, ophandle, output=None):
1740 url = "/operations/" + ophandle
1743 url += "&output=" + output
1746 if output and output.lower() == "json":
1747 return simplejson.loads(res)
# t=start-deep-check without an ophandle= must be rejected.
1752 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1753 d = self.shouldFail2(error.Error,
1754 "test_POST_DIRURL_deepcheck_no_ophandle",
1756 "slow operation requires ophandle=",
1757 self.POST, self.public_url, t="start-deep-check")
# Deep-check under ophandle=123: redirects to /operations/123, reports 8
# healthy objects in JSON and HTML (with and without trailing slash), 404s
# for an unknown per-SI detail page, and serves per-SI JSON details.
1760 def test_POST_DIRURL_deepcheck(self):
1761 def _check_redirect(statuscode, target):
1762 self.failUnlessEqual(statuscode, str(http.FOUND))
1763 self.failUnless(target.endswith("/operations/123"))
1764 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1765 self.POST, self.public_url,
1766 t="start-deep-check", ophandle="123")
1767 d.addCallback(self.wait_for_operation, "123")
1768 def _check_json(data):
1769 self.failUnlessEqual(data["finished"], True)
1770 self.failUnlessEqual(data["count-objects-checked"], 8)
1771 self.failUnlessEqual(data["count-objects-healthy"], 8)
1772 d.addCallback(_check_json)
1773 d.addCallback(self.get_operation_results, "123", "html")
1774 def _check_html(res):
1775 self.failUnless("Objects Checked: <span>8</span>" in res)
1776 self.failUnless("Objects Healthy: <span>8</span>" in res)
1777 d.addCallback(_check_html)
1779 d.addCallback(lambda res:
1780 self.GET("/operations/123/"))
1781 d.addCallback(_check_html) # should be the same as without the slash
1783 d.addCallback(lambda res:
1784 self.shouldFail2(error.Error, "one", "404 Not Found",
1785 "No detailed results for SI bogus",
1786 self.GET, "/operations/123/bogus"))
1788 foo_si = self._foo_node.get_storage_index()
1789 foo_si_s = base32.b2a(foo_si)
1790 d.addCallback(lambda res:
1791 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1792 def _check_foo_json(res):
1793 data = simplejson.loads(res)
1794 self.failUnlessEqual(data["storage-index"], foo_si_s)
1795 self.failUnless(data["results"]["healthy"])
1796 d.addCallback(_check_foo_json)
# Deep-check-and-repair under ophandle=124 on an already-healthy tree:
# all pre/post-repair counters show 8 healthy / 0 problems / 0 repairs,
# in both JSON and HTML renderings.
1799 def test_POST_DIRURL_deepcheck_and_repair(self):
1800 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1801 ophandle="124", output="json", followRedirect=True)
1802 d.addCallback(self.wait_for_operation, "124")
1803 def _check_json(data):
1804 self.failUnlessEqual(data["finished"], True)
1805 self.failUnlessEqual(data["count-objects-checked"], 8)
1806 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1807 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1808 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1809 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1810 self.failUnlessEqual(data["count-repairs-successful"], 0)
1811 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1812 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1813 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1814 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1815 d.addCallback(_check_json)
1816 d.addCallback(self.get_operation_results, "124", "html")
1817 def _check_html(res):
1818 self.failUnless("Objects Checked: <span>8</span>" in res)
1820 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1821 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1822 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1824 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1825 self.failUnless("Repairs Successful: <span>0</span>" in res)
1826 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1828 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1829 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1830 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1831 d.addCallback(_check_html)
# POST with an unknown t= on a file URL must 400 with "bad t=bogus".
1834 def test_POST_FILEURL_bad_t(self):
1835 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1836 "POST to file: bad t=bogus",
1837 self.POST, self.public_url + "/foo/bar.txt",
# POST t=mkdir with a name= field creates an empty child directory.
1841 def test_POST_mkdir(self): # return value?
1842 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1843 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1844 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST ?t=mkdir with the name in the URL path instead of a form field.
1847 def test_POST_mkdir_2(self):
1848 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
1849 d.addCallback(lambda res:
1850 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1851 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1852 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST ?t=mkdir with an intermediate path component creates bardir and
# nests newdir inside it.
1855 def test_POST_mkdirs_2(self):
1856 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
1857 d.addCallback(lambda res:
1858 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
1859 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
1860 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
1861 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST /uri?t=mkdir (no parent): the response body parses as a directory
# URI.
1864 def test_POST_mkdir_no_parentdir_noredirect(self):
1865 d = self.POST("/uri?t=mkdir")
1866 def _after_mkdir(res):
1867 uri.NewDirectoryURI.init_from_string(res)
1868 d.addCallback(_after_mkdir)
# redirect_to_result=true: a 303 whose (unquoted) target is the new
# directory's uri/URI:DIR2: URL.
1871 def test_POST_mkdir_no_parentdir_redirect(self):
1872 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1873 d.addBoth(self.shouldRedirect, None, statuscode='303')
1874 def _check_target(target):
1875 target = urllib.unquote(target)
1876 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1877 d.addCallback(_check_target)
# POST /uri with an unknown t= must 400 with the accepted-methods message.
1880 def test_POST_noparent_bad(self):
1881 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
1882 "/uri accepts only PUT, PUT?t=mkdir, "
1883 "POST?t=upload, and POST?t=mkdir",
1884 self.POST, "/uri?t=bogus")
# Scrape the mkdir form off the welcome page, submit it, and expect a 303
# redirect — ensures the rendered button actually works.
# NOTE(review): the welcome-page GET and the mo.group(2) line are elided by
# this listing; code kept verbatim.
1887 def test_welcome_page_mkdir_button(self):
1888 # Fetch the welcome page.
1890 def _after_get_welcome_page(res):
1891 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
1892 mo = MKDIR_BUTTON_RE.search(res)
1893 formaction = mo.group(1)
1895 formaname = mo.group(3)
1896 formavalue = mo.group(4)
1897 return (formaction, formt, formaname, formavalue)
1898 d.addCallback(_after_get_welcome_page)
1899 def _after_parse_form(res):
1900 (formaction, formt, formaname, formavalue) = res
1901 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
1902 d.addCallback(_after_parse_form)
1903 d.addBoth(self.shouldRedirect, None, statuscode='303')
    def test_POST_mkdir_replace(self): # return value?
        """t=mkdir over an existing child replaces it with an empty dir."""
        d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_mkdir_no_replace_queryarg(self): # return value?
        """?replace=false makes mkdir over an existing child fail; the
        original child (with baz.txt) survives."""
        d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
        d.addBoth(self.shouldFail, error.Error,
                  "POST_mkdir_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_POST_mkdir_no_replace_field(self): # return value?
        """replace=false as a form field behaves like the query arg."""
        # NOTE(review): the replace="false") argument line of this call is
        # not visible in this excerpt.
        d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
        d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_POST_mkdir_whendone_field(self):
        """when_done form field triggers a redirect to its value."""
        d = self.POST(self.public_url + "/foo",
                      t="mkdir", name="newdir", when_done="/THERE")
        d.addBoth(self.shouldRedirect, "/THERE")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_mkdir_whendone_queryarg(self):
        """when_done as a query arg triggers the same redirect."""
        d = self.POST(self.public_url + "/foo?when_done=/THERE",
                      t="mkdir", name="newdir")
        d.addBoth(self.shouldRedirect, "/THERE")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
    def test_POST_bad_t(self):
        """POST to a directory with an unknown t= value yields 400."""
        d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                             "POST to a directory with bad t=BOGUS",
                             self.POST, self.public_url + "/foo", t="BOGUS")
    def test_POST_set_children(self):
        """t=set_children with a JSON request body adds several children
        atomically."""
        contents9, n9, newuri9 = self.makefile(9)
        contents10, n10, newuri10 = self.makefile(10)
        contents11, n11, newuri11 = self.makefile(11)
        # NOTE(review): the opening of the reqbody = """{ ... JSON literal
        # and some of its metadata lines are not visible in this excerpt.
          "atomic_added_1": [ "filenode", { "rw_uri": "%s",
                  "ctime": 1002777696.7564139,
                  "mtime": 1002777696.7564139
          "atomic_added_2": [ "filenode", { "rw_uri": "%s",
                  "ctime": 1002777696.7564139,
                  "mtime": 1002777696.7564139
          "atomic_added_3": [ "filenode", { "rw_uri": "%s",
                  "ctime": 1002777696.7564139,
                  "mtime": 1002777696.7564139
            }""" % (newuri9, newuri10, newuri11)
        url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
        d = client.getPage(url, method="POST", postdata=reqbody)
        # NOTE(review): the def _then(res): header is not visible here.
            self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
            self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
            self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
        d.addCallback(_then)
        d.addErrback(self.dump_error)
    def test_POST_put_uri(self):
        """t=uri attaches an existing file cap as a new child."""
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
    def test_POST_put_uri_replace(self):
        """t=uri over an existing child replaces it by default."""
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
    def test_POST_put_uri_no_replace_queryarg(self):
        """t=uri with ?replace=false refuses to clobber an existing child;
        bar.txt keeps its original contents."""
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo?replace=false", t="uri",
                      name="bar.txt", uri=newuri)
        d.addBoth(self.shouldFail, error.Error,
                  "POST_put_uri_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_put_uri_no_replace_field(self):
        """replace=false as a form field blocks t=uri over bar.txt."""
        contents, n, newuri = self.makefile(8)
        d = self.POST(self.public_url + "/foo", t="uri", replace="false",
                      name="bar.txt", uri=newuri)
        d.addBoth(self.shouldFail, error.Error,
                  "POST_put_uri_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_delete(self):
        """t=delete removes the named child from the directory."""
        d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
        d.addCallback(lambda res: self._foo_node.list())
        def _check(children):
            self.failIf(u"bar.txt" in children)
        d.addCallback(_check)
    def test_POST_rename_file(self):
        """t=rename moves a child to a new name within the same directory;
        the contents and JSON representation follow the new name."""
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='wibble.txt')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
        d.addCallback(self.failUnlessIsBarJSON)
    def test_POST_rename_file_redundant(self):
        """Renaming a child to its current name is a harmless no-op."""
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='bar.txt')
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
        d.addCallback(self.failUnlessIsBarJSON)
    def test_POST_rename_file_replace(self):
        """By default a rename may replace an existing (directory) child."""
        # rename a file and replace a directory with it
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='empty')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"empty"))
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsBarJSON)
    def test_POST_rename_file_no_replace_queryarg(self):
        """?replace=false prevents a rename from clobbering 'empty'."""
        # rename a file and replace a directory with it
        d = self.POST(self.public_url + "/foo?replace=false", t="rename",
                      from_name="bar.txt", to_name='empty')
        d.addBoth(self.shouldFail, error.Error,
                  "POST_rename_file_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsEmptyJSON)
    def test_POST_rename_file_no_replace_field(self):
        """replace=false as a form field blocks the same rename."""
        # rename a file and replace a directory with it
        d = self.POST(self.public_url + "/foo", t="rename", replace="false",
                      from_name="bar.txt", to_name='empty')
        d.addBoth(self.shouldFail, error.Error,
                  "POST_rename_file_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsEmptyJSON)
2113 def failUnlessIsEmptyJSON(self, res):
2114 data = simplejson.loads(res)
2115 self.failUnlessEqual(data[0], "dirnode", data)
2116 self.failUnlessEqual(len(data[1]["children"]), 0)
    def test_POST_rename_file_slash_fail(self):
        """Renaming to a name containing '/' must be rejected, leaving the
        original child in place."""
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='kirk/spock.txt')
        # NOTE(review): some argument lines of this shouldFail call are not
        # visible in this excerpt.
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_rename_file_slash_fail",
                  "to_name= may not contain a slash",
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    def test_POST_rename_dir(self):
        """t=rename works on directory children as well as files."""
        d = self.POST(self.public_url, t="rename",
                      from_name="foo", to_name='plunk')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self.public_root, u"foo"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self.public_root, u"plunk"))
        d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
        d.addCallback(self.failUnlessIsFooJSON)
2141 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2142 """ If target is not None then the redirection has to go to target. If
2143 statuscode is not None then the redirection has to be accomplished with
2144 that HTTP status code."""
2145 if not isinstance(res, failure.Failure):
2146 to_where = (target is None) and "somewhere" or ("to " + target)
2147 self.fail("%s: we were expecting to get redirected %s, not get an"
2148 " actual page: %s" % (which, to_where, res))
2149 res.trap(error.PageRedirect)
2150 if statuscode is not None:
2151 self.failUnlessEqual(res.value.status, statuscode,
2152 "%s: not a redirect" % which)
2153 if target is not None:
2154 # the PageRedirect does not seem to capture the uri= query arg
2155 # properly, so we can't check for it.
2156 realtarget = self.webish_url + target
2157 self.failUnlessEqual(res.value.location, realtarget,
2158 "%s: wrong target" % which)
2159 return res.value.location
    def test_GET_URI_form(self):
        """GET /uri?uri=... redirects to /uri/$URI, preserving extra
        query arguments; following the redirect serves the content."""
        base = "/uri?uri=%s" % self._bar_txt_uri
        # this is supposed to give us a redirect to /uri/$URI, plus arguments
        targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        # NOTE(review): the initial d = self.GET(base) line is not visible
        # in this excerpt.
        d.addBoth(self.shouldRedirect, targetbase)
        d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
        d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
        d.addCallback(lambda res: self.GET(base+"&t=json"))
        d.addBoth(self.shouldRedirect, targetbase+"?t=json")
        d.addCallback(self.log, "about to get file by uri")
        d.addCallback(lambda res: self.GET(base, followRedirect=True))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(self.log, "got file by uri, about to get dir by uri")
        d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
                                           followRedirect=True))
        d.addCallback(self.failUnlessIsFooJSON)
        d.addCallback(self.log, "got dir by uri")
    def test_GET_URI_form_bad(self):
        """GET /uri without a uri= argument yields 400 Bad Request."""
        # NOTE(review): the trailing callable arguments of this shouldFail2
        # call are not visible in this excerpt.
        d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                             "400 Bad Request", "GET /uri requires uri=",
    def test_GET_rename_form(self):
        """t=rename-form serves a form pre-filled with the child name."""
        d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
                     followRedirect=True)
        # NOTE(review): the def _check(res): header is not visible here.
            self.failUnless('name="when_done" value="."' in res, res)
            self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
        d.addCallback(_check)
    def log(self, res, msg):
        """Deferred-chain logging hook; the body continues past this
        excerpt (presumably logs *msg* and passes *res* through)."""
        #print "MSG: %s RES: %s" % (msg, res)
    def test_GET_URI_URL(self):
        """GET /uri/$FILECAP returns the file bytes; filename=/save=
        arguments do not change the body."""
        base = "/uri/%s" % self._bar_txt_uri
        # NOTE(review): the initial d = self.GET(base) line is not visible
        # in this excerpt.
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_GET_URI_URL_dir(self):
        """GET /uri/$DIRCAP?t=json returns the directory's JSON."""
        base = "/uri/%s?t=json" % self._foo_uri
        # NOTE(review): the initial d = self.GET(base) line is not visible
        # in this excerpt.
        d.addCallback(self.failUnlessIsFooJSON)
    def test_GET_URI_URL_missing(self):
        """Fetching a URI with no recoverable shares yields http.GONE."""
        base = "/uri/%s" % self._bad_file_uri
        # NOTE(review): the trailing arguments of this shouldHTTPError call
        # are not visible in this excerpt.
        d = self.shouldHTTPError("test_GET_URI_URL_missing",
                                 http.GONE, None, "NotEnoughSharesError",
        # TODO: how can we exercise both sides of WebDownloadTarget.fail
        # here? we must arrange for a download to fail after target.open()
        # has been called, and then inspect the response to see that it is
        # shorter than we expected.
    def test_PUT_DIRURL_uri(self):
        """PUT ?t=uri replaces /foo with a different directory cap; the
        response body echoes the new cap."""
        d = self.s.create_empty_dirnode()
        # NOTE(review): the def _made_dir(dn): header is not visible here.
            new_uri = dn.get_uri()
            # replace /foo with a new (empty) directory
            d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
            d.addCallback(lambda res:
                          self.failUnlessEqual(res.strip(), new_uri))
            d.addCallback(lambda res:
                          self.failUnlessChildURIIs(self.public_root,
        d.addCallback(_made_dir)
    def test_PUT_DIRURL_uri_noreplace(self):
        """PUT ?t=uri&replace=false over an existing child yields 409 and
        leaves the original child URI in place."""
        d = self.s.create_empty_dirnode()
        # NOTE(review): the def _made_dir(dn): header is not visible here.
            new_uri = dn.get_uri()
            # replace /foo with a new (empty) directory, but ask that
            # replace=false, so it should fail
            d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
                                 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
                                 self.public_url + "/foo?t=uri&replace=false",
            d.addCallback(lambda res:
                          self.failUnlessChildURIIs(self.public_root,
        d.addCallback(_made_dir)
    def test_PUT_DIRURL_bad_t(self):
        """PUT to a directory with unknown t= yields 400; /foo unchanged."""
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                             "400 Bad Request", "PUT to a directory",
                             self.PUT, self.public_url + "/foo?t=BOGUS", "")
        d.addCallback(lambda res:
                      self.failUnlessChildURIIs(self.public_root,
    def test_PUT_NEWFILEURL_uri(self):
        """PUT ?t=uri links an existing file cap at a new child name."""
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
    def test_PUT_NEWFILEURL_uri_replace(self):
        """PUT ?t=uri over an existing child replaces it by default."""
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
    def test_PUT_NEWFILEURL_uri_no_replace(self):
        """PUT ?t=uri&replace=false refuses to overwrite bar.txt."""
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
    def test_PUT_NEWFILE_URI(self):
        """PUT /uri uploads an unlinked immutable file and returns its cap;
        fetching that cap returns the original bytes."""
        file_contents = "New file contents here\n"
        d = self.PUT("/uri", file_contents)
        # NOTE(review): the def _check(uri): and def _check2(res): headers
        # are not visible in this excerpt.
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_NEWFILE_URI_not_mutable(self):
        """PUT /uri?mutable=false behaves like the immutable default."""
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=false", file_contents)
        # NOTE(review): the def _check(uri): and def _check2(res): headers
        # are not visible in this excerpt.
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_NEWFILE_URI_only_PUT(self):
        """PUT /uri?t=bogus is rejected with the accepted-operations list."""
        d = self.PUT("/uri?t=bogus", "")
        d.addBoth(self.shouldFail, error.Error,
                  "PUT_NEWFILE_URI_only_PUT",
                  "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
    def test_PUT_NEWFILE_URI_mutable(self):
        """PUT /uri?mutable=true creates an unlinked mutable file whose
        best version round-trips the uploaded contents."""
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=true", file_contents)
        # NOTE(review): several lines inside these callbacks (including the
        # construction of u and the later _check/_check2 headers) are not
        # visible in this excerpt.
        def _check_mutable(uri):
            self.failUnless(IMutableFileURI.providedBy(u))
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(uri)
            return n.download_best_version()
        d.addCallback(_check_mutable)
        def _check2_mutable(data):
            self.failUnlessEqual(data, file_contents)
        d.addCallback(_check2_mutable)
            self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
            self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_mkdir(self):
        """PUT /uri?t=mkdir creates an unlinked empty directory and
        returns its cap."""
        d = self.PUT("/uri?t=mkdir", "")
        # NOTE(review): the def _check(uri): header is not visible here.
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, [])
            d2.addCallback(lambda res:
                           self.GET("/uri/%s?t=json" % uri))
        d.addCallback(_check)
        d.addCallback(self.failUnlessIsEmptyJSON)
    def test_POST_check(self):
        """t=check on a child triggers a (fake) file check."""
        d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
        # NOTE(review): the def _done(res): header is not visible here.
            # this returns a string form of the results, which are probably
            # None since we're using fake filenodes.
            # TODO: verify that the check actually happened, by changing
            # FakeCHKFileNode to count how many times .check() has been
        d.addCallback(_done)
    def test_bad_method(self):
        """Unknown HTTP methods yield 501 Not Implemented."""
        url = self.webish_url + self.public_url + "/foo/bar.txt"
        d = self.shouldHTTPError("test_bad_method",
                                 501, "Not Implemented",
                                 "I don't know how to treat a BOGUS request.",
                                 client.getPage, url, method="BOGUS")
    def test_short_url(self):
        """DELETE on the bare /uri URL yields 501 Not Implemented."""
        url = self.webish_url + "/uri"
        d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                                 "I don't know how to treat a DELETE request.",
                                 client.getPage, url, method="DELETE")
    def test_ophandle_bad(self):
        """Status queries for an unknown ophandle yield 404."""
        url = self.webish_url + "/operations/bogus?t=status"
        d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                                 "unknown/expired handle 'bogus'",
                                 client.getPage, url)
    def test_ophandle_cancel(self):
        """t=cancel stops a running operation and forgets its handle, so a
        later status query 404s."""
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/128?t=status&output=JSON"))
        # NOTE(review): the def _check1(res): and def _check2(res): headers
        # are not visible in this excerpt.
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            monitor = self.ws.root.child_operations.handles["128"][0]
            d = self.POST("/operations/128?t=cancel&output=JSON")
                data = simplejson.loads(res)
                self.failUnless("finished" in data, res)
                # t=cancel causes the handle to be forgotten
                self.failUnless(monitor.is_cancelled())
            d.addCallback(_check2)
        d.addCallback(_check1)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_cancel",
                                           404, "404 Not Found",
                                           "unknown/expired handle '128'",
                                           "/operations/128?t=status&output=JSON"))
    def test_ophandle_retainfor(self):
        """retain-for=0 expires an ophandle almost immediately."""
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
        # NOTE(review): the def _check1(res): header is not visible here.
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # the retain-for=0 will cause the handle to be expired very soon
        d.addCallback(self.stall, 2.0)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_retainfor",
                                           404, "404 Not Found",
                                           "unknown/expired handle '129'",
                                           "/operations/129?t=status&output=JSON"))
    def test_ophandle_release_after_complete(self):
        """release-after-complete=true drops the handle once finished."""
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "130")
        d.addCallback(lambda ignored:
                      self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
        # the release-after-complete=true will cause the handle to be expired
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_release_after_complete",
                                           404, "404 Not Found",
                                           "unknown/expired handle '130'",
                                           "/operations/130?t=status&output=JSON"))
    def test_incident(self):
        """POST /report_incident acknowledges the report."""
        d = self.POST("/report_incident", details="eek")
        # NOTE(review): the def _done(res): header is not visible here.
            self.failUnless("Thank you for your report!" in res, res)
        d.addCallback(_done)
    def test_static(self):
        """Files under the static/ directory are served verbatim."""
        webdir = os.path.join(self.staticdir, "subdir")
        fileutil.make_dirs(webdir)
        f = open(os.path.join(webdir, "hello.txt"), "wb")
        # NOTE(review): the write/close of "hello" and the def _check(res):
        # header are not visible in this excerpt.
        d = self.GET("/static/subdir/hello.txt")
            self.failUnlessEqual(res, "hello")
        d.addCallback(_check)
class Util(unittest.TestCase):
    """Unit tests for the small formatting helpers in allmydata.web
    (abbreviate_time/rate/size and plural)."""
    def test_abbreviate_time(self):
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        # NOTE(review): the def convert(s): and def convert2(s): headers for
        # the two local helpers are not visible in this excerpt.
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2527 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
2529 def CHECK(self, ign, which, args, clientnum=0):
2530 fileurl = self.fileurls[which]
2531 url = fileurl + "?" + args
2532 return self.GET(url, method="POST", clientnum=clientnum)
    def test_filecheck(self):
        """Exercise t=check (HTML and JSON output) against healthy, sick,
        dead, corrupt, and literal files on a no_network grid."""
        self.basedir = "web/Grid/filecheck"
        # NOTE(review): several setup lines (set_up_grid, DATA, self.uris,
        # the corrupt_share call, and a few closing parens) are not visible
        # in this excerpt.
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data("literal", convergence="")))
        d.addCallback(_stash_uri, "small")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            # delete one share of "sick", nine of "dead", and corrupt one
            # share of "corrupt"
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
        def _got_html_good_return_to(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        d.addCallback(_got_html_good_return_to)
        d.addCallback(self.CHECK, "good", "t=check&output=json")
        def _got_json_good(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"], "Healthy")
            self.failUnless(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_good)

        d.addCallback(self.CHECK, "small", "t=check")
        def _got_html_small(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_small)
        d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
        def _got_html_small_return_to(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        d.addCallback(_got_html_small_return_to)
        d.addCallback(self.CHECK, "small", "t=check&output=json")
        def _got_json_small(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_small)

        d.addCallback(self.CHECK, "sick", "t=check")
        def _got_html_sick(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_sick)
        d.addCallback(self.CHECK, "sick", "t=check&output=json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_sick)

        d.addCallback(self.CHECK, "dead", "t=check")
        def _got_html_dead(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_dead)
        d.addCallback(self.CHECK, "dead", "t=check&output=json")
        def _got_json_dead(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 1 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failIf(r["results"]["recoverable"])
        d.addCallback(_got_json_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
        def _got_html_corrupt(res):
            self.failUnless("Not Healthy! : Unhealthy" in res, res)
        d.addCallback(_got_html_corrupt)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
        def _got_json_corrupt(res):
            r = simplejson.loads(res)
            self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
            self.failIf(r["results"]["healthy"])
            self.failUnless(r["results"]["recoverable"])
            self.failUnlessEqual(r["results"]["count-shares-good"], 9)
            self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
        d.addCallback(_got_json_corrupt)
        d.addErrback(self.explain_web_error)
    def test_repair_html(self):
        """t=check&repair=true reports repair results in HTML output."""
        self.basedir = "web/Grid/repair_html"
        # NOTE(review): several setup lines (set_up_grid, DATA, self.uris,
        # the corrupt_share call) are not visible in this excerpt.
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check&repair=true")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("No repair necessary" in res, res)
        d.addCallback(_got_html_good)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true")
        def _got_html_sick(res):
            self.failUnless("Healthy : healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_sick)

        # repair of a dead file will fail, of course, but it isn't yet
        # clear how this should be reported. Right now it shows up as
        #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
        #def _got_html_dead(res):
        #    self.failUnless("Healthy : healthy" in res, res)
        #    self.failIf("Not Healthy" in res, res)
        #    self.failUnless("No repair necessary" in res, res)
        #d.addCallback(_got_html_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
        def _got_html_corrupt(res):
            self.failUnless("Healthy : Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_corrupt)
        d.addErrback(self.explain_web_error)
    def test_repair_json(self):
        """t=check&repair=true&output=json reports pre- and post-repair
        results for a file missing one share."""
        self.basedir = "web/Grid/repair_json"
        # NOTE(review): several setup lines (set_up_grid, DATA, self.uris)
        # are not visible in this excerpt.
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA+"1", convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "sick")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["repair-attempted"], True)
            self.failUnlessEqual(r["repair-successful"], True)
            self.failUnlessEqual(r["pre-repair-results"]["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["pre-repair-results"]["results"]["healthy"])
            self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
            self.failUnless(r["post-repair-results"]["results"]["healthy"])
        d.addCallback(_got_json_sick)
        d.addErrback(self.explain_web_error)
2782 def test_deep_check(self):
# Build a small directory tree (root/good CHK file, root/small literal,
# root/sick with two shares deleted), run t=stream-deep-check and verify
# the streamed JSON units, then add an unrecoverable subdirectory and
# verify that both t=stream-manifest and t=stream-deep-check report the
# fatal condition by emitting an "ERROR:" line followed by a traceback
# instead of further JSON.
# NOTE(review): lines are elided from this listing (inline numbering
# skips); grid setup, "_stash_uri"'s "return fn" (needed because the
# "subdir" callback at 2853 receives the node), the "def _done(res):"
# header before 2821, and "return d" are presumably in the gaps.
2783 self.basedir = "web/Grid/deep_check"
2785 c0 = self.g.clients[0]
2789 d = c0.create_empty_dirnode()
2790 def _stash_root_and_create_file(n):
# n is the new root dirnode; record its URL and add the first child.
2792 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2793 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2794 d.addCallback(_stash_root_and_create_file)
2795 def _stash_uri(fn, which):
2796 self.uris[which] = fn.get_uri()
2798 d.addCallback(_stash_uri, "good")
2799 d.addCallback(lambda ign:
2800 self.rootnode.add_file(u"small",
2801 upload.Data("literal",
2803 d.addCallback(_stash_uri, "small")
2804 d.addCallback(lambda ign:
2805 self.rootnode.add_file(u"sick",
2806 upload.Data(DATA+"1",
2808 d.addCallback(_stash_uri, "sick")
2810 def _clobber_shares(ignored):
# Drop two of "sick"'s shares: still recoverable (3-of-10 encoding)
# but no longer healthy.
2811 self.delete_shares_numbered(self.uris["sick"], [0,1])
2812 d.addCallback(_clobber_shares)
2819 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
2821 units = [simplejson.loads(line)
2822 for line in res.splitlines()
2824 self.failUnlessEqual(len(units), 4+1)
2825 # should be parent-first
2827 self.failUnlessEqual(u0["path"], [])
2828 self.failUnlessEqual(u0["type"], "directory")
2829 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2830 u0cr = u0["check-results"]
2831 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
2833 ugood = [u for u in units
2834 if u["type"] == "file" and u["path"] == [u"good"]][0]
2835 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2836 ugoodcr = ugood["check-results"]
2837 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
2840 self.failUnlessEqual(stats["type"], "stats")
2842 self.failUnlessEqual(s["count-immutable-files"], 2)
2843 self.failUnlessEqual(s["count-literal-files"], 1)
2844 self.failUnlessEqual(s["count-directories"], 1)
2845 d.addCallback(_done)
2847 # now add root/subdir and root/subdir/grandchild, then make subdir
2848 # unrecoverable, then see what happens
2850 d.addCallback(lambda ign:
2851 self.rootnode.create_empty_directory(u"subdir"))
2852 d.addCallback(_stash_uri, "subdir")
2853 d.addCallback(lambda subdir_node:
2854 subdir_node.add_file(u"grandchild",
2855 upload.Data(DATA+"2",
2857 d.addCallback(_stash_uri, "grandchild")
2859 d.addCallback(lambda ign:
2860 self.delete_shares_numbered(self.uris["subdir"],
2867 # root/subdir [unrecoverable]
2868 # root/subdir/grandchild
2870 # how should a streaming-JSON API indicate fatal error?
2871 # answer: emit ERROR: instead of a JSON string
2873 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2874 def _check_broken_manifest(res):
# Locate the first "ERROR:" line; everything before it must still be
# valid JSON units, everything after it is the traceback text.
2875 lines = res.splitlines()
2877 for (i,line) in enumerate(lines)
2878 if line.startswith("ERROR:")]
2880 self.fail("no ERROR: in output: %s" % (res,))
2881 first_error = error_lines[0]
2882 error_line = lines[first_error]
2883 error_msg = lines[first_error+1:]
2884 error_msg_s = "\n".join(error_msg) + "\n"
2885 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2887 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2888 units = [simplejson.loads(line) for line in lines[:first_error]]
2889 self.failUnlessEqual(len(units), 5) # includes subdir
2890 last_unit = units[-1]
2891 self.failUnlessEqual(last_unit["path"], ["subdir"])
2892 d.addCallback(_check_broken_manifest)
2894 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
2895 def _check_broken_deepcheck(res):
# Same ERROR:-detection protocol as the manifest case, plus the
# deep-check result for the unrecoverable subdir itself.
2896 lines = res.splitlines()
2898 for (i,line) in enumerate(lines)
2899 if line.startswith("ERROR:")]
2901 self.fail("no ERROR: in output: %s" % (res,))
2902 first_error = error_lines[0]
2903 error_line = lines[first_error]
2904 error_msg = lines[first_error+1:]
2905 error_msg_s = "\n".join(error_msg) + "\n"
2906 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2908 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2909 units = [simplejson.loads(line) for line in lines[:first_error]]
2910 self.failUnlessEqual(len(units), 5) # includes subdir
2911 last_unit = units[-1]
2912 self.failUnlessEqual(last_unit["path"], ["subdir"])
2913 r = last_unit["check-results"]["results"]
2914 self.failUnlessEqual(r["count-recoverable-versions"], 0)
2915 self.failUnlessEqual(r["count-shares-good"], 1)
2916 self.failUnlessEqual(r["recoverable"], False)
2917 d.addCallback(_check_broken_deepcheck)
2919 d.addErrback(self.explain_web_error)
2922 def test_deep_check_and_repair(self):
2923 self.basedir = "web/Grid/deep_check_and_repair"
2925 c0 = self.g.clients[0]
2929 d = c0.create_empty_dirnode()
2930 def _stash_root_and_create_file(n):
2932 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2933 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2934 d.addCallback(_stash_root_and_create_file)
2935 def _stash_uri(fn, which):
2936 self.uris[which] = fn.get_uri()
2937 d.addCallback(_stash_uri, "good")
2938 d.addCallback(lambda ign:
2939 self.rootnode.add_file(u"small",
2940 upload.Data("literal",
2942 d.addCallback(_stash_uri, "small")
2943 d.addCallback(lambda ign:
2944 self.rootnode.add_file(u"sick",
2945 upload.Data(DATA+"1",
2947 d.addCallback(_stash_uri, "sick")
2948 #d.addCallback(lambda ign:
2949 # self.rootnode.add_file(u"dead",
2950 # upload.Data(DATA+"2",
2952 #d.addCallback(_stash_uri, "dead")
2954 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2955 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2956 #d.addCallback(_stash_uri, "corrupt")
2958 def _clobber_shares(ignored):
2959 good_shares = self.find_shares(self.uris["good"])
2960 self.failUnlessEqual(len(good_shares), 10)
2961 sick_shares = self.find_shares(self.uris["sick"])
2962 os.unlink(sick_shares[0][2])
2963 #dead_shares = self.find_shares(self.uris["dead"])
2964 #for i in range(1, 10):
2965 # os.unlink(dead_shares[i][2])
2967 #c_shares = self.find_shares(self.uris["corrupt"])
2968 #cso = CorruptShareOptions()
2969 #cso.stdout = StringIO()
2970 #cso.parseOptions([c_shares[0][2]])
2972 d.addCallback(_clobber_shares)
2975 # root/good CHK, 10 shares
2977 # root/sick CHK, 9 shares
2979 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
2981 units = [simplejson.loads(line)
2982 for line in res.splitlines()
2984 self.failUnlessEqual(len(units), 4+1)
2985 # should be parent-first
2987 self.failUnlessEqual(u0["path"], [])
2988 self.failUnlessEqual(u0["type"], "directory")
2989 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2990 u0crr = u0["check-and-repair-results"]
2991 self.failUnlessEqual(u0crr["repair-attempted"], False)
2992 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
2994 ugood = [u for u in units
2995 if u["type"] == "file" and u["path"] == [u"good"]][0]
2996 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2997 ugoodcrr = ugood["check-and-repair-results"]
2998 self.failUnlessEqual(u0crr["repair-attempted"], False)
2999 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3001 usick = [u for u in units
3002 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3003 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3004 usickcrr = usick["check-and-repair-results"]
3005 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3006 self.failUnlessEqual(usickcrr["repair-successful"], True)
3007 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3008 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3011 self.failUnlessEqual(stats["type"], "stats")
3013 self.failUnlessEqual(s["count-immutable-files"], 2)
3014 self.failUnlessEqual(s["count-literal-files"], 1)
3015 self.failUnlessEqual(s["count-directories"], 1)
3016 d.addCallback(_done)
3018 d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    """Count leases on every share of the stashed URI named `which`.

    `ignored` is the previous Deferred result (this runs as a callback);
    `which` is a key into self.uris. Returns a list of
    (share_filename, lease_count) pairs, which the chained
    _assert_leasecount callback consumes.

    FIX: as shown in the excerpt, `lease_counts` was appended to without
    ever being initialized and the method returned None, which would
    break the `_count_leases` -> `_assert_leasecount` callback pairs used
    by the lease tests below; the initialization and `return` are
    restored here. (This may be listing elision -- confirm against the
    full file.)
    """
    u = self.uris[which]
    shares = self.find_shares(u)
    lease_counts = []
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        # get_leases() yields lease records; we only care how many.
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
    return lease_counts
3031 def _assert_leasecount(self, lease_counts, expected):
3032 for (fn, num_leases) in lease_counts:
3033 if num_leases != expected:
3034 self.fail("expected %d leases, have %d, on %s" %
3035 (expected, num_leases, fn))
3037 def test_add_lease(self):
# Upload two immutable files and one mutable file, then verify that
# t=check alone, and t=check&add-lease=true from the *original* client,
# leave the lease count at 1 (same lease secrets -> renewal), while the
# same request from a second client (clientnum=1) adds a distinct second
# lease -- for both immutable ("one") and mutable ("mutable") objects.
# NOTE(review): lines are elided from this listing (inline numbering
# skips); DATA, self.uris/self.fileurls initialization, the clientnum=1
# argument for the request at 3115, and "return d" are presumably in
# the gaps -- confirm against the full file.
3038 self.basedir = "web/Grid/add_lease"
3039 self.set_up_grid(num_clients=2)
3040 c0 = self.g.clients[0]
3043 d = c0.upload(upload.Data(DATA, convergence=""))
3044 def _stash_uri(ur, which):
3045 self.uris[which] = ur.uri
3046 d.addCallback(_stash_uri, "one")
3047 d.addCallback(lambda ign:
3048 c0.upload(upload.Data(DATA+"1", convergence="")))
3049 d.addCallback(_stash_uri, "two")
3050 def _stash_mutable_uri(n, which):
# Mutable files hand back a node, not an UploadResults.
3051 self.uris[which] = n.get_uri()
3052 assert isinstance(self.uris[which], str)
3053 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3054 d.addCallback(_stash_mutable_uri, "mutable")
3056 def _compute_fileurls(ignored):
3058 for which in self.uris:
3059 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3060 d.addCallback(_compute_fileurls)
# Baseline: each object starts with exactly one lease from its upload.
3062 d.addCallback(self._count_leases, "one")
3063 d.addCallback(self._assert_leasecount, 1)
3064 d.addCallback(self._count_leases, "two")
3065 d.addCallback(self._assert_leasecount, 1)
3066 d.addCallback(self._count_leases, "mutable")
3067 d.addCallback(self._assert_leasecount, 1)
3069 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3070 def _got_html_good(res):
3071 self.failUnless("Healthy" in res, res)
3072 self.failIf("Not Healthy" in res, res)
3073 d.addCallback(_got_html_good)
# A plain check must not touch lease counts.
3075 d.addCallback(self._count_leases, "one")
3076 d.addCallback(self._assert_leasecount, 1)
3077 d.addCallback(self._count_leases, "two")
3078 d.addCallback(self._assert_leasecount, 1)
3079 d.addCallback(self._count_leases, "mutable")
3080 d.addCallback(self._assert_leasecount, 1)
3082 # this CHECK uses the original client, which uses the same
3083 # lease-secrets, so it will just renew the original lease
3084 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3085 d.addCallback(_got_html_good)
3087 d.addCallback(self._count_leases, "one")
3088 d.addCallback(self._assert_leasecount, 1)
3089 d.addCallback(self._count_leases, "two")
3090 d.addCallback(self._assert_leasecount, 1)
3091 d.addCallback(self._count_leases, "mutable")
3092 d.addCallback(self._assert_leasecount, 1)
3094 # this CHECK uses an alternate client, which adds a second lease
3095 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3096 d.addCallback(_got_html_good)
3098 d.addCallback(self._count_leases, "one")
3099 d.addCallback(self._assert_leasecount, 2)
3100 d.addCallback(self._count_leases, "two")
3101 d.addCallback(self._assert_leasecount, 1)
3102 d.addCallback(self._count_leases, "mutable")
3103 d.addCallback(self._assert_leasecount, 1)
# Same renew-vs-add distinction, now exercised on the mutable file.
3105 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3106 d.addCallback(_got_html_good)
3108 d.addCallback(self._count_leases, "one")
3109 d.addCallback(self._assert_leasecount, 2)
3110 d.addCallback(self._count_leases, "two")
3111 d.addCallback(self._assert_leasecount, 1)
3112 d.addCallback(self._count_leases, "mutable")
3113 d.addCallback(self._assert_leasecount, 1)
3115 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3117 d.addCallback(_got_html_good)
3119 d.addCallback(self._count_leases, "one")
3120 d.addCallback(self._assert_leasecount, 2)
3121 d.addCallback(self._count_leases, "two")
3122 d.addCallback(self._assert_leasecount, 1)
3123 d.addCallback(self._count_leases, "mutable")
3124 d.addCallback(self._assert_leasecount, 2)
3126 d.addErrback(self.explain_web_error)
3129 def test_deep_add_lease(self):
# Like test_add_lease, but drives add-lease through the recursive
# t=stream-deep-check traversal of a directory tree: no add-lease ->
# counts stay 1; add-lease from the original client -> still 1 (renewal);
# add-lease from the second client -> 2 everywhere.
# NOTE(review): lines are elided from this listing (inline numbering
# skips); DATA, self.uris/self.fileurls init, "def _done(res):" before
# 3158, the clientnum=1 argument for the request at 3182, and
# "return d" are presumably in the gaps -- confirm against the full file.
3130 self.basedir = "web/Grid/deep_add_lease"
3131 self.set_up_grid(num_clients=2)
3132 c0 = self.g.clients[0]
3136 d = c0.create_empty_dirnode()
3137 def _stash_root_and_create_file(n):
3139 self.uris["root"] = n.get_uri()
3140 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3141 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3142 d.addCallback(_stash_root_and_create_file)
3143 def _stash_uri(fn, which):
3144 self.uris[which] = fn.get_uri()
3145 d.addCallback(_stash_uri, "one")
3146 d.addCallback(lambda ign:
3147 self.rootnode.add_file(u"small",
3148 upload.Data("literal",
3150 d.addCallback(_stash_uri, "small")
3152 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3153 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3154 d.addCallback(_stash_uri, "mutable")
3156 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
3158 units = [simplejson.loads(line)
3159 for line in res.splitlines()
3161 # root, one, small, mutable, stats
3162 self.failUnlessEqual(len(units), 4+1)
3163 d.addCallback(_done)
3165 d.addCallback(self._count_leases, "root")
3166 d.addCallback(self._assert_leasecount, 1)
3167 d.addCallback(self._count_leases, "one")
3168 d.addCallback(self._assert_leasecount, 1)
3169 d.addCallback(self._count_leases, "mutable")
3170 d.addCallback(self._assert_leasecount, 1)
3172 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3173 d.addCallback(_done)
# Same client, same lease secrets: this merely renews existing leases.
3175 d.addCallback(self._count_leases, "root")
3176 d.addCallback(self._assert_leasecount, 1)
3177 d.addCallback(self._count_leases, "one")
3178 d.addCallback(self._assert_leasecount, 1)
3179 d.addCallback(self._count_leases, "mutable")
3180 d.addCallback(self._assert_leasecount, 1)
3182 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3184 d.addCallback(_done)
# Second client's lease secrets differ, so each node gains a lease.
3186 d.addCallback(self._count_leases, "root")
3187 d.addCallback(self._assert_leasecount, 2)
3188 d.addCallback(self._count_leases, "one")
3189 d.addCallback(self._assert_leasecount, 2)
3190 d.addCallback(self._count_leases, "mutable")
3191 d.addCallback(self._assert_leasecount, 2)
3193 d.addErrback(self.explain_web_error)
3197 def test_exceptions(self):
# End-to-end coverage of the webapi's error rendering: 410 text/plain
# bodies for NotEnoughSharesError (0-share and 1-share files), 404 for a
# missing child, readable-but-degraded HTML for unrecoverable
# directories, 410 UnrecoverableFileError for their ?t=json forms, and
# 500 Internal Server Error pages (html vs plain, selected by the Accept
# header) via the ErrorBoom resource defined at the bottom of the file.
# NOTE(review): lines are elided from this listing (inline numbering
# skips); self.fileurls init, the "def _stash_root(n):"/"def
# _stash_bad(ur):" headers, several helper bodies, and "return d" are
# presumably in the gaps -- confirm against the full file.
3198 self.basedir = "web/Grid/exceptions"
3199 self.set_up_grid(num_clients=1, num_servers=2)
3200 c0 = self.g.clients[0]
3203 d = c0.create_empty_dirnode()
3205 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3206 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3208 d.addCallback(_stash_root)
3209 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# "1share": keep only share 0 so the file is found but unrecoverable
# at 3-of-10 encoding.
3211 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3212 self.delete_shares_numbered(ur.uri, range(1,10))
# "0shares": flip a bit in the URI's key so the derived storage index
# matches no stored shares at all.
3214 u = uri.from_string(ur.uri)
3215 u.key = testutil.flip_bit(u.key, 0)
3216 baduri = u.to_string()
3217 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3218 d.addCallback(_stash_bad)
3219 d.addCallback(lambda ign: c0.create_empty_dirnode())
3220 def _mangle_dirnode_1share(n):
3222 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3223 self.fileurls["dir-1share-json"] = url + "?t=json"
3224 self.delete_shares_numbered(u, range(1,10))
3225 d.addCallback(_mangle_dirnode_1share)
3226 d.addCallback(lambda ign: c0.create_empty_dirnode())
3227 def _mangle_dirnode_0share(n):
3229 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3230 self.fileurls["dir-0share-json"] = url + "?t=json"
3231 self.delete_shares_numbered(u, range(0,10))
3232 d.addCallback(_mangle_dirnode_0share)
3234 # NotEnoughSharesError should be reported sensibly, with a
3235 # text/plain explanation of the problem, and perhaps some
3236 # information on which shares *could* be found.
3238 d.addCallback(lambda ignored:
3239 self.shouldHTTPError("GET unrecoverable",
3240 410, "Gone", "NotEnoughSharesError",
3241 self.GET, self.fileurls["0shares"]))
3242 def _check_zero_shares(body):
3243 self.failIf("<html>" in body, body)
# Collapse all whitespace so the comparison ignores line wrapping.
3244 body = " ".join(body.strip().split())
3245 exp = ("NotEnoughSharesError: no shares could be found. "
3246 "Zero shares usually indicates a corrupt URI, or that "
3247 "no servers were connected, but it might also indicate "
3248 "severe corruption. You should perform a filecheck on "
3249 "this object to learn more.")
3250 self.failUnlessEqual(exp, body)
3251 d.addCallback(_check_zero_shares)
3254 d.addCallback(lambda ignored:
3255 self.shouldHTTPError("GET 1share",
3256 410, "Gone", "NotEnoughSharesError",
3257 self.GET, self.fileurls["1share"]))
3258 def _check_one_share(body):
3259 self.failIf("<html>" in body, body)
3260 body = " ".join(body.strip().split())
3261 exp = ("NotEnoughSharesError: 1 share found, but we need "
3262 "3 to recover the file. This indicates that some "
3263 "servers were unavailable, or that shares have been "
3264 "lost to server departure, hard drive failure, or disk "
3265 "corruption. You should perform a filecheck on "
3266 "this object to learn more.")
3267 self.failUnlessEqual(exp, body)
3268 d.addCallback(_check_one_share)
3270 d.addCallback(lambda ignored:
3271 self.shouldHTTPError("GET imaginary",
3272 404, "Not Found", None,
3273 self.GET, self.fileurls["imaginary"]))
3274 def _missing_child(body):
3275 self.failUnless("No such child: imaginary" in body, body)
3276 d.addCallback(_missing_child)
3278 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3279 def _check_0shares_dir_html(body):
3280 self.failUnless("<html>" in body, body)
3281 # we should see the regular page, but without the child table or
3283 body = " ".join(body.strip().split())
3284 self.failUnlessIn('href="?t=info">More info on this directory',
3286 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3287 "could not be retrieved, because there were insufficient "
3288 "good shares. This might indicate that no servers were "
3289 "connected, insufficient servers were connected, the URI "
3290 "was corrupt, or that shares have been lost due to server "
3291 "departure, hard drive failure, or disk corruption. You "
3292 "should perform a filecheck on this object to learn more.")
3293 self.failUnlessIn(exp, body)
3294 self.failUnlessIn("No upload forms: directory is unreadable", body)
3295 d.addCallback(_check_0shares_dir_html)
3297 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3298 def _check_1shares_dir_html(body):
3299 # at some point, we'll split UnrecoverableFileError into 0-shares
3300 # and some-shares like we did for immutable files (since there
3301 # are different sorts of advice to offer in each case). For now,
3302 # they present the same way.
3303 self.failUnless("<html>" in body, body)
3304 body = " ".join(body.strip().split())
3305 self.failUnlessIn('href="?t=info">More info on this directory',
3307 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3308 "could not be retrieved, because there were insufficient "
3309 "good shares. This might indicate that no servers were "
3310 "connected, insufficient servers were connected, the URI "
3311 "was corrupt, or that shares have been lost due to server "
3312 "departure, hard drive failure, or disk corruption. You "
3313 "should perform a filecheck on this object to learn more.")
3314 self.failUnlessIn(exp, body)
3315 self.failUnlessIn("No upload forms: directory is unreadable", body)
3316 d.addCallback(_check_1shares_dir_html)
3318 d.addCallback(lambda ignored:
3319 self.shouldHTTPError("GET dir-0share-json",
3320 410, "Gone", "UnrecoverableFileError",
3322 self.fileurls["dir-0share-json"]))
3323 def _check_unrecoverable_file(body):
3324 self.failIf("<html>" in body, body)
3325 body = " ".join(body.strip().split())
3326 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3327 "could not be retrieved, because there were insufficient "
3328 "good shares. This might indicate that no servers were "
3329 "connected, insufficient servers were connected, the URI "
3330 "was corrupt, or that shares have been lost due to server "
3331 "departure, hard drive failure, or disk corruption. You "
3332 "should perform a filecheck on this object to learn more.")
3333 self.failUnlessEqual(exp, body)
3334 d.addCallback(_check_unrecoverable_file)
3336 d.addCallback(lambda ignored:
3337 self.shouldHTTPError("GET dir-1share-json",
3338 410, "Gone", "UnrecoverableFileError",
3340 self.fileurls["dir-1share-json"]))
3341 d.addCallback(_check_unrecoverable_file)
3343 d.addCallback(lambda ignored:
3344 self.shouldHTTPError("GET imaginary",
3345 404, "Not Found", None,
3346 self.GET, self.fileurls["imaginary"]))
3348 # attach a webapi child that throws a random error, to test how it
3350 w = c0.getServiceNamed("webish")
3351 w.root.putChild("ERRORBOOM", ErrorBoom())
3353 d.addCallback(lambda ignored:
3354 self.shouldHTTPError("GET errorboom_html",
3355 500, "Internal Server Error", None,
3356 self.GET, "ERRORBOOM"))
3357 def _internal_error_html(body):
3358 # test that a weird exception during a webapi operation with
3359 # Accept:*/* results in a text/html stack trace, while one
3360 # without that Accept: line gets us a text/plain stack trace
3361 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3362 d.addCallback(_internal_error_html)
3364 d.addCallback(lambda ignored:
3365 self.shouldHTTPError("GET errorboom_text",
3366 500, "Internal Server Error", None,
3367 self.GET, "ERRORBOOM",
3368 headers={"accept": ["text/plain"]}))
3369 def _internal_error_text(body):
3370 # complement of _internal_error_html above: with an explicit
3371 # Accept: text/plain header, the same internal error must render
3372 # as a plain-text traceback rather than an HTML page
3373 self.failIf("<html>" in body, body)
3374 self.failUnless(body.startswith("Traceback "), body)
3375 d.addCallback(_internal_error_text)
3377 def _flush_errors(res):
3378 # Trial: please ignore the CompletelyUnhandledError in the logs
3379 self.flushLoggedErrors(CompletelyUnhandledError)
3381 d.addBoth(_flush_errors)
3385 class CompletelyUnhandledError(Exception):
    """Marker exception deliberately raised by ErrorBoom so test_exceptions
    can exercise (and then flush from trial's logs) the webapi's
    internal-server-error rendering path."""
# NOTE(review): the class body (presumably just "pass") appears to be
# elided from this listing -- confirm against the full file.
3387 class ErrorBoom(rend.Page):
# Minimal nevow resource whose beforeRender hook always raises, attached
# at /ERRORBOOM by test_exceptions to verify that an unexpected exception
# inside the webapi yields a 500 page (HTML or text/plain depending on
# the Accept header).
3388 def beforeRender(self, ctx):
# Raise unconditionally so every GET of this resource triggers the
# server's internal-error handling.
3389 raise CompletelyUnhandledError("whoops")