1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.web import status, common
15 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
16 from allmydata.util import fileutil, base32
17 from allmydata.util.assertutil import precondition
18 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
19 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
20 from allmydata.interfaces import IURI, INewDirectoryURI, \
21 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
22 from allmydata.mutable import servermap, publish, retrieve
23 import common_util as testutil
24 from allmydata.test.no_network import GridTestMixin
26 from allmydata.test.common_web import HTTPClientGETFactory, \
29 # create a fake uploader/downloader, and a couple of fake dirnodes, then
30 # create a webserver that works against them
# Minimal stand-in for the introducer client; the welcome/status pages only
# call these accessor methods.  NOTE(review): the method bodies are not
# visible in this excerpt (presumably they return empty collections —
# confirm against the full file).
class FakeIntroducerClient:
    def get_all_connectors(self):
    def get_all_connections_for(self, service_name):
    def get_all_peerids(self):
# Minimal stand-in for the stats provider used by the status pages.
# NOTE(review): the enclosing accessor's "def" line is not visible in this
# excerpt; `stats` appears to be built inside a get_stats()-style method.
class FakeStatsProvider:
        stats = {'stats': {}, 'counters': {}}
class FakeClient(service.MultiService):
    """In-process fake of a Tahoe client node backing the test webserver.

    Provides canned identity/status attributes and builds fake file/dir
    nodes instead of touching the network.  NOTE(review): several method
    bodies are only partially visible in this excerpt; orphaned lines keep
    their apparent indentation.
    """
    nodeid = "fake_nodeid"
    nickname = "fake_nickname"
    basedir = "fake_basedir"
    def get_versions(self):
        # canned version map rendered on the welcome page
        return {'allmydata': "fake",
    introducer_furl = "None"
    introducer_client = FakeIntroducerClient()
    # one pre-made status object of each kind, so each /status page has
    # something to render
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [download.DownloadStatus()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    convergence = "some random string"
    stats_provider = FakeStatsProvider()
    def connected_to_introducer(self):
    storage_broker = StorageFarmBroker()
    def get_storage_broker(self):
        return self.storage_broker
    def create_node_from_uri(self, auri):
        # Map a cap string to the matching fake node type.  Verify-caps are
        # deliberately unsupported: the assert below fires for them.
        precondition(isinstance(auri, str), auri)
        u = uri.from_string(auri)
        if (INewDirectoryURI.providedBy(u)
            or IReadonlyNewDirectoryURI.providedBy(u)):
            return FakeDirectoryNode(self).init_from_uri(u)
        if IFileURI.providedBy(u):
            return FakeCHKFileNode(u, self)
        assert IMutableFileURI.providedBy(u), u
        return FakeMutableFileNode(self).init_from_uri(u)
    def create_empty_dirnode(self):
        # NOTE(review): the line creating deferred `d` is not visible here.
        n = FakeDirectoryNode(self)
        d.addCallback(lambda res: n)
    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
    def create_mutable_file(self, contents=""):
        n = FakeMutableFileNode(self)
        return n.create(contents)
    def upload(self, uploadable):
        # Read the whole uploadable and stuff it into a fake CHK filenode.
        # NOTE(review): the nested "_got_data" def line is not visible here.
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
            n = create_chk_filenode(self, data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    # accessors consumed by the /status pages
    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
class WebMixin(object):
    """Shared fixture for the web tests.

    Starts a FakeClient-backed webish server, builds a small directory tree
    (public/foo/bar.txt, public/foo/sub/baz.txt, public/reedownlee/nor, ...)
    and provides HTTP helpers (GET/HEAD/PUT/DELETE/POST) plus common
    assertion helpers.  NOTE(review): several enclosing "def" lines (setUp,
    tearDown, nested callbacks) are not visible in this excerpt; the
    indentation of orphaned lines is a best-effort reconstruction.
    """
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
        s.setServiceParent(self.s)
        # port "0" means "pick any free port"; recover the real one for URLs
        self.webish_port = port = s.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port
        l = [ self.s.create_empty_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri())
            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()
            foo.set_uri(u"empty", res[3][1].get_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri)
            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri)
            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file)
            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file)
            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"
            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            # remember bar.txt's metadata so tests can compare ctime/mtime
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
    def makefile(self, number):
        # Create an immutable fake file whose contents mention `number`;
        # returns (contents, filenode, uri).
        contents = "contents of file %s\n" % number
        n = create_chk_filenode(self.s, contents)
        return contents, n, n.get_uri()
        return self.s.stopService()
    def failUnlessIsBarDotTxt(self, res):
        # `res` must be exactly the contents of public/foo/bar.txt
        self.failUnlessEqual(res, self.BAR_CONTENTS, res)
    def failUnlessIsBarJSON(self, res):
        # Assert `res` is the t=json description of bar.txt: an immutable
        # file (no rw_uri) with matching ro/verify caps and size.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], u"filenode")
        self.failUnless(isinstance(data[1], dict))
        self.failIf(data[1]["mutable"])
        self.failIf("rw_uri" in data[1]) # immutable
        self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
        self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
    def failUnlessIsFooJSON(self, res):
        # Assert `res` is the t=json description of the mutable directory
        # public/foo, including the expected set of children.
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list))
        self.failUnlessEqual(data[0], "dirnode", res)
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnless("rw_uri" in data[1]) # mutable
        self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
        self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
        self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
                             [u"bar.txt", u"blockingfile", u"empty",
                              u"n\u00fc.txt", u"sub"])
        # NOTE(review): the "for (name,value)" line of this dict-comprehension
        # is not visible in this excerpt.
        kids = dict( [(unicode(name),value)
                      in data[1]["children"].iteritems()] )
        self.failUnlessEqual(kids[u"sub"][0], "dirnode")
        self.failUnless("metadata" in kids[u"sub"][1])
        self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
        self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
        self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
                             self._bar_txt_verifycap)
        self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
                             self._bar_txt_metadata["ctime"])
        self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
    def GET(self, urlpath, followRedirect=False, return_response=False,
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.webish_url + urlpath
        factory = HTTPClientGETFactory(url, method="GET",
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
                return (data, factory.status, factory.response_headers)
            d.addCallback(_got_data)
        return factory.deferred
    def HEAD(self, urlpath, return_response=False, **kwargs):
        # this requires some surgery, because twisted.web.client doesn't want
        # to give us back the response headers.
        factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
        reactor.connectTCP("localhost", self.webish_port, factory)
                return (data, factory.status, factory.response_headers)
            d.addCallback(_got_data)
        return factory.deferred
    def PUT(self, urlpath, data, **kwargs):
        # PUT `data` as the request body to `urlpath`
        url = self.webish_url + urlpath
        return client.getPage(url, method="PUT", postdata=data, **kwargs)
    def DELETE(self, urlpath):
        url = self.webish_url + urlpath
        return client.getPage(url, method="DELETE")
    def POST(self, urlpath, followRedirect=False, **fields):
        # Hand-build a multipart/form-data body from `fields` and POST it.
        # A tuple value means (filename, contents); unicode is sent UTF-8.
        url = self.webish_url + urlpath
        sepbase = "boogabooga"
        form.append('Content-Disposition: form-data; name="_charset"')
        for name, value in fields.iteritems():
            if isinstance(value, tuple):
                filename, value = value
                form.append('Content-Disposition: form-data; name="%s"; '
                            'filename="%s"' % (name, filename.encode("utf-8")))
                form.append('Content-Disposition: form-data; name="%s"' % name)
            if isinstance(value, unicode):
                value = value.encode("utf-8")
            assert isinstance(value, str)
        body = "\r\n".join(form) + "\r\n"
        headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
        return client.getPage(url, method="POST", postdata=body,
                              headers=headers, followRedirect=followRedirect)
    def shouldFail(self, res, expected_failure, which,
                   substring=None, response_substring=None):
        # addBoth-style checker: `res` must be a Failure wrapping
        # `expected_failure`, optionally containing the given substrings;
        # otherwise the test fails.
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
                self.failUnless(substring in str(res),
                                "substring '%s' not in '%s'"
                                % (substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "response substring '%s' not in '%s'"
                                % (response_substring, res.value.response))
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    def shouldFail2(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
        # Like shouldFail, but invokes `callable` itself and checks the
        # resulting Deferred.
        assert substring is None or isinstance(substring, str)
        assert response_substring is None or isinstance(response_substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                if response_substring:
                    self.failUnless(response_substring in res.value.response,
                                    "%s: response substring '%s' not in '%s'"
                                    response_substring, res.value.response))
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
    def should404(self, res, which):
        # `res` must be a web error with HTTP status 404
        if isinstance(res, failure.Failure):
            res.trap(error.Error)
            self.failUnlessEqual(res.value.status, "404")
            self.fail("%s was supposed to Error(404), not get '%s'" %
365 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
    # NOTE(review): this test's body is not visible in this excerpt.
    def test_create(self):
    def test_welcome(self):
        # The welcome page should mention both product names.
        # NOTE(review): the GET call and nested "_check" def are not visible.
            self.failUnless('Welcome To AllMyData' in res)
            self.failUnless('Tahoe' in res)
            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
    def test_provisioning(self):
        # Drive the /provisioning page through several form submissions and
        # check the rendered space-consumption figures.
        # NOTE(review): nested "_check*" def lines are not visible here.
        d = self.GET("/provisioning/")
            self.failUnless('Tahoe Provisioning Tool' in res)
            fields = {'filled': True,
                      "num_users": int(50e3),
                      "files_per_user": 1000,
                      "space_per_user": int(1e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "3-of-10-5",
                      "ownership_mode": "A",
                      "download_rate": 100,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Provisioning Tool' in res)
            self.failUnless("Share space consumed: 167.01TB" in res)
            fields = {'filled': True,
                      "num_users": int(50e6),
                      "files_per_user": 1000,
                      "space_per_user": int(5e9),
                      "sharing_ratio": 1.0,
                      "encoding_parameters": "25-of-100-50",
                      "num_servers": 30000,
                      "ownership_mode": "E",
                      "drive_failure_model": "U",
                      "download_rate": 1000,
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
            self.failUnless("Share space consumed: huge!" in res)
            fields = {'filled': True}
            return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
            self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
    def test_reliability_tool(self):
        # The /reliability page needs NumPy; skip when it is unavailable.
        # NOTE(review): the try/except lines around this import are missing.
            from allmydata import reliability
            _hush_pyflakes = reliability
            raise unittest.SkipTest("reliability tool requires NumPy")
        d = self.GET("/reliability/")
            self.failUnless('Tahoe Reliability Tool' in res)
            fields = {'drive_lifetime': "8Y",
                      "check_period": "1M",
                      "report_period": "3M",
            return self.POST("/reliability/", **fields)
        d.addCallback(_check)
            self.failUnless('Tahoe Reliability Tool' in res)
            r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
            self.failUnless(re.search(r, res), res)
        d.addCallback(_check2)
    def test_status(self):
        # Each pre-made status object (from FakeClient) should be listed on
        # /status and have its own page at /status/<kind>-<counter>.
        dl_num = self.s.list_all_download_statuses()[0].get_counter()
        ul_num = self.s.list_all_upload_statuses()[0].get_counter()
        mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
        pub_num = self.s.list_all_publish_statuses()[0].get_counter()
        ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
        d = self.GET("/status", followRedirect=True)
            self.failUnless('Upload and Download Status' in res, res)
            self.failUnless('"down-%d"' % dl_num in res, res)
            self.failUnless('"up-%d"' % ul_num in res, res)
            self.failUnless('"mapupdate-%d"' % mu_num in res, res)
            self.failUnless('"publish-%d"' % pub_num in res, res)
            self.failUnless('"retrieve-%d"' % ret_num in res, res)
        d.addCallback(_check)
        d.addCallback(lambda res: self.GET("/status/?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless(isinstance(data, dict))
            active = data["active"]
            # TODO: test more. We need a way to fake an active operation
        d.addCallback(_check_json)
        d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
            self.failUnless("File Download Status" in res, res)
        d.addCallback(_check_dl)
        d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
            self.failUnless("File Upload Status" in res, res)
        d.addCallback(_check_ul)
        d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
        def _check_mapupdate(res):
            self.failUnless("Mutable File Servermap Update Status" in res, res)
        d.addCallback(_check_mapupdate)
        d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
        def _check_publish(res):
            self.failUnless("Mutable File Publish Status" in res, res)
        d.addCallback(_check_publish)
        d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
        def _check_retrieve(res):
            self.failUnless("Mutable File Retrieve Status" in res, res)
        d.addCallback(_check_retrieve)
509 def test_status_numbers(self):
510 drrm = status.DownloadResultsRendererMixin()
511 self.failUnlessEqual(drrm.render_time(None, None), "")
512 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
513 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
514 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
515 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
516 self.failUnlessEqual(drrm.render_rate(None, None), "")
517 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
518 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
519 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
521 urrm = status.UploadResultsRendererMixin()
522 self.failUnlessEqual(urrm.render_time(None, None), "")
523 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
524 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
525 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
526 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
527 self.failUnlessEqual(urrm.render_rate(None, None), "")
528 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
529 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
530 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
    def test_GET_FILEURL(self):
        # plain GET of a file URL returns the file contents
        # NOTE(review): the trailing "return d" is not visible here.
        d = self.GET(self.public_url + "/foo/bar.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_GET_FILEURL_range(self):
        # a bounded Range request yields 206 with a Content-Range header
        headers = {"range": "bytes=1-10"}
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
            self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
    def test_GET_FILEURL_partial_range(self):
        # an open-ended Range ("bytes=5-") returns the tail of the file
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                     return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
            self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
    def test_HEAD_FILEURL_range(self):
        # HEAD with a Range gives 206, Content-Range, and an empty body
        headers = {"range": "bytes=1-10"}
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
    def test_HEAD_FILEURL_partial_range(self):
        # HEAD with an open-ended Range still reports the correct range
        headers = {"range": "bytes=5-"}
        length = len(self.BAR_CONTENTS)
        d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(int(status), 206)
            self.failUnless(headers.has_key("content-range"))
            self.failUnlessEqual(headers["content-range"][0],
                                 "bytes 5-%d/%d" % (length-1, length))
    def test_GET_FILEURL_range_bad(self):
        # a malformed Range header must be rejected
        headers = {"range": "BOGUS=fizbop-quarnak"}
        d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
                             "Syntactically invalid http range header",
                             self.GET, self.public_url + "/foo/bar.txt",
    def test_HEAD_FILEURL(self):
        # HEAD reports content-length/content-type with an empty body
        d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
        def _got((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(self.BAR_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
    def test_GET_FILEURL_named(self):
        # /file/$CAP (and /named/$CAP) serve the file regardless of the
        # trailing name segment; ?save=true supports a download filename,
        # including a UTF-8 encoded unicode one.
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
        d = self.GET(base + "/@@name=/blah.txt")
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        save_url = base + "?save=true&filename=blah.txt"
        d.addCallback(lambda res: self.GET(save_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
        u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        u_fn_e = urllib.quote(u_filename.encode("utf-8"))
        u_url = base + "?save=true&filename=" + u_fn_e
        d.addCallback(lambda res: self.GET(u_url))
        d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    def test_PUT_FILEURL_named_bad(self):
        # /file is read-only: PUT must be rejected
        base = "/file/%s" % urllib.quote(self._bar_txt_uri)
        d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                             "/file can only be used with GET or HEAD",
                             self.PUT, base + "/@@name=/blah.txt", "")
    def test_GET_DIRURL_named_bad(self):
        # a directory cap under /file must be rejected
        base = "/file/%s" % urllib.quote(self._foo_uri)
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                             self.GET, base + "/@@name=/blah.txt")
    def test_GET_slash_file_bad(self):
        # /file with no cap at all must be rejected
        d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                             "/file must be followed by a file-cap and a name",
    def test_GET_unhandled_URI_named(self):
        # verify-caps are not retrievable through /file
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/file/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                             "is not a valid file- or directory- cap",
    def test_GET_unhandled_URI(self):
        # verify-caps are not retrievable through /uri either
        contents, n, newuri = self.makefile(12)
        verifier_cap = n.get_verify_cap().to_string()
        base = "/uri/%s" % urllib.quote(verifier_cap)
        # client.create_node_from_uri() can't handle verify-caps
        d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                             "is not a valid file- or directory- cap",
    def test_GET_FILE_URI(self):
        # GET /uri/$FILECAP returns the file contents
        # NOTE(review): the GET call line is not visible in this excerpt.
        base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_GET_FILE_URI_badchild(self):
        # asking for a child of a file cap is a 400
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Files have no children, certainly not named 'boguschild'"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
    def test_PUT_FILE_URI_badchild(self):
        # PUTting beneath a file cap is a 400
        base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
        errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
        d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                             "400 Bad Request", errmsg,
    def test_GET_FILEURL_save(self):
        # ?save=true still returns the file body
        d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
        # TODO: look at the headers, expect a Content-Disposition: attachment
        # header.
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_GET_FILEURL_missing(self):
        # a nonexistent child yields 404
        d = self.GET(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_GET_FILEURL_missing")
    def test_PUT_NEWFILEURL(self):
        # PUT to a new name creates the child with the given contents
        d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_PUT_NEWFILEURL_not_mutable(self):
        # explicit ?mutable=false behaves like the immutable default
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_PUT_NEWFILEURL_range_bad(self):
        # Content-Range on PUT is refused (501), and no child is created
        headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
        target = self.public_url + "/foo/new.txt"
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                             "501 Not Implemented",
                             "Content-Range in PUT not yet supported",
                             # (and certainly not for immutable files)
                             self.PUT, target, self.NEWFILE_CONTENTS[1:11],
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"new.txt"))
    def test_PUT_NEWFILEURL_mutable(self):
        # ?mutable=true creates a writable mutable file and returns its cap
        # NOTE(review): the nested "_check_uri" def line is not visible.
        d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                     self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 201)
            u = uri.from_string_mutable_filenode(res)
            self.failUnless(u.is_mutable())
            self.failIf(u.is_readonly())
        d.addCallback(_check_uri)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(self._foo_node,
                                                             self.NEWFILE_CONTENTS))
    def test_PUT_NEWFILEURL_mutable_toobig(self):
        # SDMF files over MUTABLE_SIZELIMIT are rejected with 413
        d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo/new.txt?mutable=true",
                             "b" * (self.s.MUTABLE_SIZELIMIT+1))
    def test_PUT_NEWFILEURL_replace(self):
        # PUT over an existing child replaces its contents
        d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
        # TODO: we lose the response code, so we can't check this
        #self.failUnlessEqual(responsecode, 200)
        d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_PUT_NEWFILEURL_bad_t(self):
        # an unknown t= value on a file PUT is a 400
        d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                             "PUT to a file: bad t=bogus",
                             self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
    def test_PUT_NEWFILEURL_no_replace(self):
        # ?replace=false refuses to overwrite an existing child
        d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
                  "There was already a child by that name, and you asked me "
    def test_PUT_NEWFILEURL_mkdirs(self):
        # PUT to a deep path creates intermediate directories
        # NOTE(review): the line binding `fn` (presumably self._foo_node) is
        # not visible in this excerpt.
        d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
        d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_PUT_NEWFILEURL_blocked(self):
        # a file in the path blocks intermediate-directory creation
        d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                     self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
                  "Unable to create directory 'blockingfile': a file was in the way")
    def test_DELETE_FILEURL(self):
        # DELETE removes the child from its parent directory
        d = self.DELETE(self.public_url + "/foo/bar.txt")
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    def test_DELETE_FILEURL_missing(self):
        # DELETE of a nonexistent child yields 404
        d = self.DELETE(self.public_url + "/foo/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
    def test_DELETE_FILEURL_missing2(self):
        # DELETE beneath a nonexistent directory also yields 404
        d = self.DELETE(self.public_url + "/missing/missing")
        d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
    def test_GET_FILEURL_json(self):
        # twisted.web.http.parse_qs ignores any query args without an '=', so
        # I can't do "GET /path?json", I have to do "GET /path/t=json"
        # instead. This may make it tricky to emulate the S3 interface
        # completely.
        d = self.GET(self.public_url + "/foo/bar.txt?t=json")
        d.addCallback(self.failUnlessIsBarJSON)
    def test_GET_FILEURL_json_missing(self):
        # ?json on a missing child still yields 404
        d = self.GET(self.public_url + "/foo/missing?json")
        d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
    def test_GET_FILEURL_uri(self):
        # t=uri and t=readonly-uri return the file's cap string
        # NOTE(review): the nested "_check"/"_check2" def lines are missing.
        d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check)
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
            # for now, for files, uris and readonly-uris are the same
            self.failUnlessEqual(res, self._bar_txt_uri)
        d.addCallback(_check2)
    def test_GET_FILEURL_badtype(self):
        # an unknown t= value on a file GET is a 400
        d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
                                 self.public_url + "/foo/bar.txt?t=bogus")
    def test_GET_FILEURL_uri_missing(self):
        # t=uri on a missing child yields 404
        d = self.GET(self.public_url + "/foo/missing?t=uri")
        d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
    def test_GET_DIRURL(self):
        # Render the directory listing HTML and check: welcome-page link,
        # per-file rows (relative URLs), read-only rendering, DIR-RO rows,
        # and the empty-directory page with its mkdir form.
        # NOTE(review): the line binding ROOT and the nested "_check*" def
        # lines are not visible in this excerpt.
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        d = self.GET(self.public_url + "/foo", followRedirect=True)
            self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
            # the FILE reference points to a URI, but it should end in bar.txt
            bar_url = ("%s/file/%s/@@named=/bar.txt" %
                       (ROOT, urllib.quote(self._bar_txt_uri)))
            get_bar = "".join([r'<td>FILE</td>',
                               r'<a href="%s">bar.txt</a>' % bar_url,
                               r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
            self.failUnless(re.search(get_bar, res), res)
            for line in res.split("\n"):
                # find the line that contains the delete button for bar.txt
                if ("form action" in line and
                    'value="delete"' in line and
                    'value="bar.txt"' in line):
                    # the form target should use a relative URL
                    foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                    self.failUnless(('action="%s"' % foo_url) in line, line)
                    # and the when_done= should too
                    #done_url = urllib.quote(???)
                    #self.failUnless(('name="when_done" value="%s"' % done_url)
                self.fail("unable to find delete-bar.txt line", res)
            # the DIR reference just points to a URI
            sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
            get_sub = ((r'<td>DIR</td>')
                       +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
            self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)
        # look at a directory which is readonly
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
            self.failUnless("(read-only)" in res, res)
            self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)
        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
            self.failUnless(re.search('<td>DIR-RO</td>'
                                      r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)
        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
            self.failUnless("directory is empty" in res, res)
            MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
            self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)
    def test_GET_DIRURL_badtype(self):
        # an unknown t= value on a directory GET is an HTTP error
        d = self.shouldHTTPError("test_GET_DIRURL_badtype",
                                 self.public_url + "/foo?t=bogus")
    def test_GET_DIRURL_json(self):
        # t=json on a directory returns the dirnode JSON description
        d = self.GET(self.public_url + "/foo?t=json")
        d.addCallback(self.failUnlessIsFooJSON)
    def test_POST_DIRURL_manifest_no_ophandle(self):
        # slow operations must supply an ophandle= argument
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_manifest_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-manifest")
    def test_POST_DIRURL_manifest(self):
        # Start a manifest operation and fetch its results in html, text,
        # and JSON form; all should list sub/ and sub/baz.txt.
        # NOTE(review): some operation-result plumbing lines and the nested
        # "_got_json" def line are not visible in this excerpt.
        d = defer.succeed(None)
        def getman(ignored, output):
            d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
            d.addCallback(self.wait_for_operation, "125")
            d.addCallback(self.get_operation_results, "125", output)
        d.addCallback(getman, None)
        def _got_html(manifest):
            self.failUnless("Manifest of SI=" in manifest)
            self.failUnless("<td>sub</td>" in manifest)
            self.failUnless(self._sub_uri in manifest)
            self.failUnless("<td>sub/baz.txt</td>" in manifest)
        d.addCallback(_got_html)
        # both t=status and unadorned GET should be identical
        d.addCallback(lambda res: self.GET("/operations/125"))
        d.addCallback(_got_html)
        d.addCallback(getman, "html")
        d.addCallback(_got_html)
        d.addCallback(getman, "text")
        def _got_text(manifest):
            self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
            self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
        d.addCallback(_got_text)
        d.addCallback(getman, "JSON")
            data = res["manifest"]
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
            self.failUnless((u"sub",u"baz.txt") in got)
            self.failUnless("finished" in res)
            self.failUnless("origin" in res)
            self.failUnless("storage-index" in res)
            self.failUnless("verifycaps" in res)
            self.failUnless("stats" in res)
        d.addCallback(_got_json)
1002 def test_POST_DIRURL_deepsize_no_ophandle(self):
1003 d = self.shouldFail2(error.Error,
1004 "test_POST_DIRURL_deepsize_no_ophandle",
1006 "slow operation requires ophandle=",
1007 self.POST, self.public_url, t="start-deep-size")
1010 def test_POST_DIRURL_deepsize(self):
1011 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1012 followRedirect=True)
1013 d.addCallback(self.wait_for_operation, "126")
1014 d.addCallback(self.get_operation_results, "126", "json")
1015 def _got_json(data):
1016 self.failUnlessEqual(data["finished"], True)
1018 self.failUnless(size > 1000)
1019 d.addCallback(_got_json)
1020 d.addCallback(self.get_operation_results, "126", "text")
1022 mo = re.search(r'^size: (\d+)$', res, re.M)
1023 self.failUnless(mo, res)
1024 size = int(mo.group(1))
1025 # with directories, the size varies.
1026 self.failUnless(size > 1000)
1027 d.addCallback(_got_text)
1030 def test_POST_DIRURL_deepstats_no_ophandle(self):
1031 d = self.shouldFail2(error.Error,
1032 "test_POST_DIRURL_deepstats_no_ophandle",
1034 "slow operation requires ophandle=",
1035 self.POST, self.public_url, t="start-deep-stats")
1038 def test_POST_DIRURL_deepstats(self):
1039 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1040 followRedirect=True)
1041 d.addCallback(self.wait_for_operation, "127")
1042 d.addCallback(self.get_operation_results, "127", "json")
1043 def _got_json(stats):
1044 expected = {"count-immutable-files": 3,
1045 "count-mutable-files": 0,
1046 "count-literal-files": 0,
1048 "count-directories": 3,
1049 "size-immutable-files": 57,
1050 "size-literal-files": 0,
1051 #"size-directories": 1912, # varies
1052 #"largest-directory": 1590,
1053 "largest-directory-children": 5,
1054 "largest-immutable-file": 19,
1056 for k,v in expected.iteritems():
1057 self.failUnlessEqual(stats[k], v,
1058 "stats[%s] was %s, not %s" %
1060 self.failUnlessEqual(stats["size-files-histogram"],
1062 d.addCallback(_got_json)
1065 def test_POST_DIRURL_stream_manifest(self):
1066 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1068 self.failUnless(res.endswith("\n"))
1069 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1070 self.failUnlessEqual(len(units), 7)
1071 self.failUnlessEqual(units[-1]["type"], "stats")
1073 self.failUnlessEqual(first["path"], [])
1074 self.failUnlessEqual(first["cap"], self._foo_uri)
1075 self.failUnlessEqual(first["type"], "directory")
1076 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1077 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1078 self.failIfEqual(baz["storage-index"], None)
1079 self.failIfEqual(baz["verifycap"], None)
1080 self.failIfEqual(baz["repaircap"], None)
1082 d.addCallback(_check)
def test_GET_DIRURL_uri(self):
    """GET ?t=uri on a directory returns the directory's read-write URI."""
    d = self.GET(self.public_url + "/foo?t=uri")
    def _check(res):
        # the response body is exactly the dirnode's URI string
        self.failUnlessEqual(res, self._foo_uri)
    d.addCallback(_check)
    return d
def test_GET_DIRURL_readonly_uri(self):
    """GET ?t=readonly-uri on a directory returns its read-only URI."""
    d = self.GET(self.public_url + "/foo?t=readonly-uri")
    def _check(res):
        # the response body is exactly the read-only cap for the dirnode
        self.failUnlessEqual(res, self._foo_readonly_uri)
    d.addCallback(_check)
    return d
def test_PUT_NEWDIRURL(self):
    """PUT ?t=mkdir on a new name creates an empty child directory."""
    d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    # the freshly-created directory must be empty
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits for the whole chain
    return d
def test_PUT_NEWDIRURL_exists(self):
    """PUT ?t=mkdir on an existing directory leaves its children alone."""
    d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    # 'sub' already contained baz.txt; mkdir must not have clobbered it
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
1115 def test_PUT_NEWDIRURL_blocked(self):
1116 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1117 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1119 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1120 d.addCallback(lambda res:
1121 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1122 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1123 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1126 def test_PUT_NEWDIRURL_mkdir_p(self):
1127 d = defer.succeed(None)
1128 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1129 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1130 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1131 def mkdir_p(mkpnode):
1132 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1134 def made_subsub(ssuri):
1135 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1136 d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
1138 d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
1140 d.addCallback(made_subsub)
1142 d.addCallback(mkdir_p)
def test_PUT_NEWDIRURL_mkdirs(self):
    """PUT ?t=mkdir with a missing intermediate creates the whole path."""
    d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
    # 'newdir' must NOT appear directly under foo ...
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"newdir"))
    # ... the intermediate 'subdir' must have been created ...
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
    # ... and 'newdir' lives (empty) inside it
    d.addCallback(lambda res:
                  self._foo_node.get_child_at_path(u"subdir/newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_DELETE_DIRURL(self):
    """DELETE on a directory URL unlinks it from its parent."""
    d = self.DELETE(self.public_url + "/foo")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    # return the Deferred so trial waits for the delete and the check
    return d
def test_DELETE_DIRURL_missing(self):
    """DELETE of a missing child 404s and leaves the parent untouched."""
    d = self.DELETE(self.public_url + "/foo/missing")
    # addBoth: the 404 arrives as a Failure, which should404 consumes
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"foo"))
    return d
def test_DELETE_DIRURL_missing2(self):
    """DELETE of a missing top-level name should also produce a 404."""
    d = self.DELETE(self.public_url + "/missing")
    # addBoth: the 404 arrives as a Failure, which should404 consumes
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    return d
1174 def dump_root(self):
1176 w = webish.DirnodeWalkerMixin()
1177 def visitor(childpath, childnode, metadata):
1179 d = w.walk(self.public_root, visitor)
1182 def failUnlessNodeKeysAre(self, node, expected_keys):
1183 for k in expected_keys:
1184 assert isinstance(k, unicode)
1186 def _check(children):
1187 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1188 d.addCallback(_check)
1190 def failUnlessNodeHasChild(self, node, name):
1191 assert isinstance(name, unicode)
1193 def _check(children):
1194 self.failUnless(name in children)
1195 d.addCallback(_check)
1197 def failIfNodeHasChild(self, node, name):
1198 assert isinstance(name, unicode)
1200 def _check(children):
1201 self.failIf(name in children)
1202 d.addCallback(_check)
def failUnlessChildContentsAre(self, node, name, expected_contents):
    """Assert that child 'name' of dirnode 'node' (an immutable file)
    downloads to exactly 'expected_contents'.

    Returns a Deferred: the download is asynchronous, and callers chain
    further callbacks on this helper's result.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_to_data())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    # without this return, callers (and trial) could not wait for the
    # download/assertion to complete
    return d
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
    """Assert that child 'name' of dirnode 'node' (a mutable file)
    downloads (best recoverable version) to exactly 'expected_contents'.

    Returns a Deferred for callers to chain on.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_best_version())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    # without this return, callers (and trial) could not wait for the
    # download/assertion to complete
    return d
1223 def failUnlessChildURIIs(self, node, name, expected_uri):
1224 assert isinstance(name, unicode)
1225 d = node.get_child_at_path(name)
1227 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1228 d.addCallback(_check)
1231 def failUnlessURIMatchesChild(self, got_uri, node, name):
1232 assert isinstance(name, unicode)
1233 d = node.get_child_at_path(name)
1235 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1236 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake grid stores 'contents' under CHK URI 'got_uri'."""
    # failUnlessEqual (instead of failUnless on ==) so a mismatch reports
    # both the actual and the expected data in the failure message
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
1242 def test_POST_upload(self):
1243 d = self.POST(self.public_url + "/foo", t="upload",
1244 file=("new.txt", self.NEWFILE_CONTENTS))
1246 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1247 d.addCallback(lambda res:
1248 self.failUnlessChildContentsAre(fn, u"new.txt",
1249 self.NEWFILE_CONTENTS))
1252 def test_POST_upload_unicode(self):
1253 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1254 d = self.POST(self.public_url + "/foo", t="upload",
1255 file=(filename, self.NEWFILE_CONTENTS))
1257 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1258 d.addCallback(lambda res:
1259 self.failUnlessChildContentsAre(fn, filename,
1260 self.NEWFILE_CONTENTS))
1261 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1262 d.addCallback(lambda res: self.GET(target_url))
1263 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1264 self.NEWFILE_CONTENTS,
1268 def test_POST_upload_unicode_named(self):
1269 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1270 d = self.POST(self.public_url + "/foo", t="upload",
1272 file=("overridden", self.NEWFILE_CONTENTS))
1274 d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
1275 d.addCallback(lambda res:
1276 self.failUnlessChildContentsAre(fn, filename,
1277 self.NEWFILE_CONTENTS))
1278 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1279 d.addCallback(lambda res: self.GET(target_url))
1280 d.addCallback(lambda contents: self.failUnlessEqual(contents,
1281 self.NEWFILE_CONTENTS,
1285 def test_POST_upload_no_link(self):
1286 d = self.POST("/uri", t="upload",
1287 file=("new.txt", self.NEWFILE_CONTENTS))
1288 def _check_upload_results(page):
1289 # this should be a page which describes the results of the upload
1290 # that just finished.
1291 self.failUnless("Upload Results:" in page)
1292 self.failUnless("URI:" in page)
1293 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1294 mo = uri_re.search(page)
1295 self.failUnless(mo, page)
1296 new_uri = mo.group(1)
1298 d.addCallback(_check_upload_results)
1299 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1302 def test_POST_upload_no_link_whendone(self):
1303 d = self.POST("/uri", t="upload", when_done="/",
1304 file=("new.txt", self.NEWFILE_CONTENTS))
1305 d.addBoth(self.shouldRedirect, "/")
1308 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1309 d = defer.maybeDeferred(callable, *args, **kwargs)
1311 if isinstance(res, failure.Failure):
1312 res.trap(error.PageRedirect)
1313 statuscode = res.value.status
1314 target = res.value.location
1315 return checker(statuscode, target)
1316 self.fail("%s: callable was supposed to redirect, not return '%s'"
1321 def test_POST_upload_no_link_whendone_results(self):
1322 def check(statuscode, target):
1323 self.failUnlessEqual(statuscode, str(http.FOUND))
1324 self.failUnless(target.startswith(self.webish_url), target)
1325 return client.getPage(target, method="GET")
1326 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1328 self.POST, "/uri", t="upload",
1329 when_done="/uri/%(uri)s",
1330 file=("new.txt", self.NEWFILE_CONTENTS))
1331 d.addCallback(lambda res:
1332 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
1335 def test_POST_upload_no_link_mutable(self):
1336 d = self.POST("/uri", t="upload", mutable="true",
1337 file=("new.txt", self.NEWFILE_CONTENTS))
1338 def _check(new_uri):
1339 new_uri = new_uri.strip()
1340 self.new_uri = new_uri
1342 self.failUnless(IMutableFileURI.providedBy(u))
1343 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
1344 n = self.s.create_node_from_uri(new_uri)
1345 return n.download_best_version()
1346 d.addCallback(_check)
1348 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1349 return self.GET("/uri/%s" % urllib.quote(self.new_uri))
1350 d.addCallback(_check2)
1352 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1353 return self.GET("/file/%s" % urllib.quote(self.new_uri))
1354 d.addCallback(_check3)
1356 self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
1357 d.addCallback(_check4)
1360 def test_POST_upload_no_link_mutable_toobig(self):
1361 d = self.shouldFail2(error.Error,
1362 "test_POST_upload_no_link_mutable_toobig",
1363 "413 Request Entity Too Large",
1364 "SDMF is limited to one segment, and 10001 > 10000",
1366 "/uri", t="upload", mutable="true",
1368 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1371 def test_POST_upload_mutable(self):
1372 # this creates a mutable file
1373 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1374 file=("new.txt", self.NEWFILE_CONTENTS))
1376 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1377 d.addCallback(lambda res:
1378 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1379 self.NEWFILE_CONTENTS))
1380 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1382 self.failUnless(IMutableFileNode.providedBy(newnode))
1383 self.failUnless(newnode.is_mutable())
1384 self.failIf(newnode.is_readonly())
1385 self._mutable_node = newnode
1386 self._mutable_uri = newnode.get_uri()
1389 # now upload it again and make sure that the URI doesn't change
1390 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1391 d.addCallback(lambda res:
1392 self.POST(self.public_url + "/foo", t="upload",
1394 file=("new.txt", NEWER_CONTENTS)))
1395 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1396 d.addCallback(lambda res:
1397 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1399 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1401 self.failUnless(IMutableFileNode.providedBy(newnode))
1402 self.failUnless(newnode.is_mutable())
1403 self.failIf(newnode.is_readonly())
1404 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1405 d.addCallback(_got2)
1407 # upload a second time, using PUT instead of POST
1408 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1409 d.addCallback(lambda res:
1410 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1411 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1412 d.addCallback(lambda res:
1413 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1416 # finally list the directory, since mutable files are displayed
1417 # slightly differently
1419 d.addCallback(lambda res:
1420 self.GET(self.public_url + "/foo/",
1421 followRedirect=True))
1422 def _check_page(res):
1423 # TODO: assert more about the contents
1424 self.failUnless("SSK" in res)
1426 d.addCallback(_check_page)
1428 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1430 self.failUnless(IMutableFileNode.providedBy(newnode))
1431 self.failUnless(newnode.is_mutable())
1432 self.failIf(newnode.is_readonly())
1433 self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
1434 d.addCallback(_got3)
1436 # look at the JSON form of the enclosing directory
1437 d.addCallback(lambda res:
1438 self.GET(self.public_url + "/foo/?t=json",
1439 followRedirect=True))
1440 def _check_page_json(res):
1441 parsed = simplejson.loads(res)
1442 self.failUnlessEqual(parsed[0], "dirnode")
1443 children = dict( [(unicode(name),value)
1445 in parsed[1]["children"].iteritems()] )
1446 self.failUnless("new.txt" in children)
1447 new_json = children["new.txt"]
1448 self.failUnlessEqual(new_json[0], "filenode")
1449 self.failUnless(new_json[1]["mutable"])
1450 self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
1451 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1452 self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
1453 d.addCallback(_check_page_json)
1455 # and the JSON form of the file
1456 d.addCallback(lambda res:
1457 self.GET(self.public_url + "/foo/new.txt?t=json"))
1458 def _check_file_json(res):
1459 parsed = simplejson.loads(res)
1460 self.failUnlessEqual(parsed[0], "filenode")
1461 self.failUnless(parsed[1]["mutable"])
1462 self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
1463 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1464 self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
1465 d.addCallback(_check_file_json)
1467 # and look at t=uri and t=readonly-uri
1468 d.addCallback(lambda res:
1469 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1470 d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
1471 d.addCallback(lambda res:
1472 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1473 def _check_ro_uri(res):
1474 ro_uri = unicode(self._mutable_node.get_readonly().to_string())
1475 self.failUnlessEqual(res, ro_uri)
1476 d.addCallback(_check_ro_uri)
1478 # make sure we can get to it from /uri/URI
1479 d.addCallback(lambda res:
1480 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1481 d.addCallback(lambda res:
1482 self.failUnlessEqual(res, NEW2_CONTENTS))
1484 # and that HEAD computes the size correctly
1485 d.addCallback(lambda res:
1486 self.HEAD(self.public_url + "/foo/new.txt",
1487 return_response=True))
1488 def _got_headers((res, status, headers)):
1489 self.failUnlessEqual(res, "")
1490 self.failUnlessEqual(headers["content-length"][0],
1491 str(len(NEW2_CONTENTS)))
1492 self.failUnlessEqual(headers["content-type"], ["text/plain"])
1493 d.addCallback(_got_headers)
1495 # make sure that size errors are displayed correctly for overwrite
1496 d.addCallback(lambda res:
1497 self.shouldFail2(error.Error,
1498 "test_POST_upload_mutable-toobig",
1499 "413 Request Entity Too Large",
1500 "SDMF is limited to one segment, and 10001 > 10000",
1502 self.public_url + "/foo", t="upload",
1505 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1508 d.addErrback(self.dump_error)
1511 def test_POST_upload_mutable_toobig(self):
1512 d = self.shouldFail2(error.Error,
1513 "test_POST_upload_no_link_mutable_toobig",
1514 "413 Request Entity Too Large",
1515 "SDMF is limited to one segment, and 10001 > 10000",
1517 self.public_url + "/foo",
1518 t="upload", mutable="true",
1520 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1523 def dump_error(self, f):
1524 # if the web server returns an error code (like 400 Bad Request),
1525 # web.client.getPage puts the HTTP response body into the .response
1526 # attribute of the exception object that it gives back. It does not
1527 # appear in the Failure's repr(), so the ERROR that trial displays
1528 # will be rather terse and unhelpful. addErrback this method to the
1529 # end of your chain to get more information out of these errors.
1530 if f.check(error.Error):
1531 print "web.error.Error:"
1533 print f.value.response
1536 def test_POST_upload_replace(self):
1537 d = self.POST(self.public_url + "/foo", t="upload",
1538 file=("bar.txt", self.NEWFILE_CONTENTS))
1540 d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
1541 d.addCallback(lambda res:
1542 self.failUnlessChildContentsAre(fn, u"bar.txt",
1543 self.NEWFILE_CONTENTS))
def test_POST_upload_no_replace_ok(self):
    """replace=false is harmless when no child with that name exists yet."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessEqual(res,
                                                   self.NEWFILE_CONTENTS))
    # return the Deferred so trial waits for upload + readback
    return d
1554 def test_POST_upload_no_replace_queryarg(self):
1555 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1556 file=("bar.txt", self.NEWFILE_CONTENTS))
1557 d.addBoth(self.shouldFail, error.Error,
1558 "POST_upload_no_replace_queryarg",
1560 "There was already a child by that name, and you asked me "
1561 "to not replace it")
1562 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1563 d.addCallback(self.failUnlessIsBarDotTxt)
1566 def test_POST_upload_no_replace_field(self):
1567 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1568 file=("bar.txt", self.NEWFILE_CONTENTS))
1569 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1571 "There was already a child by that name, and you asked me "
1572 "to not replace it")
1573 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1574 d.addCallback(self.failUnlessIsBarDotTxt)
1577 def test_POST_upload_whendone(self):
1578 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1579 file=("new.txt", self.NEWFILE_CONTENTS))
1580 d.addBoth(self.shouldRedirect, "/THERE")
1582 d.addCallback(lambda res:
1583 self.failUnlessChildContentsAre(fn, u"new.txt",
1584 self.NEWFILE_CONTENTS))
1587 def test_POST_upload_named(self):
1589 d = self.POST(self.public_url + "/foo", t="upload",
1590 name="new.txt", file=self.NEWFILE_CONTENTS)
1591 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1592 d.addCallback(lambda res:
1593 self.failUnlessChildContentsAre(fn, u"new.txt",
1594 self.NEWFILE_CONTENTS))
1597 def test_POST_upload_named_badfilename(self):
1598 d = self.POST(self.public_url + "/foo", t="upload",
1599 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1600 d.addBoth(self.shouldFail, error.Error,
1601 "test_POST_upload_named_badfilename",
1603 "name= may not contain a slash",
1605 # make sure that nothing was added
1606 d.addCallback(lambda res:
1607 self.failUnlessNodeKeysAre(self._foo_node,
1608 [u"bar.txt", u"blockingfile",
1609 u"empty", u"n\u00fc.txt",
1613 def test_POST_FILEURL_check(self):
1614 bar_url = self.public_url + "/foo/bar.txt"
1615 d = self.POST(bar_url, t="check")
1617 self.failUnless("Healthy :" in res)
1618 d.addCallback(_check)
1619 redir_url = "http://allmydata.org/TARGET"
1620 def _check2(statuscode, target):
1621 self.failUnlessEqual(statuscode, str(http.FOUND))
1622 self.failUnlessEqual(target, redir_url)
1623 d.addCallback(lambda res:
1624 self.shouldRedirect2("test_POST_FILEURL_check",
1628 when_done=redir_url))
1629 d.addCallback(lambda res:
1630 self.POST(bar_url, t="check", return_to=redir_url))
1632 self.failUnless("Healthy :" in res)
1633 self.failUnless("Return to file" in res)
1634 self.failUnless(redir_url in res)
1635 d.addCallback(_check3)
1637 d.addCallback(lambda res:
1638 self.POST(bar_url, t="check", output="JSON"))
1639 def _check_json(res):
1640 data = simplejson.loads(res)
1641 self.failUnless("storage-index" in data)
1642 self.failUnless(data["results"]["healthy"])
1643 d.addCallback(_check_json)
1647 def test_POST_FILEURL_check_and_repair(self):
1648 bar_url = self.public_url + "/foo/bar.txt"
1649 d = self.POST(bar_url, t="check", repair="true")
1651 self.failUnless("Healthy :" in res)
1652 d.addCallback(_check)
1653 redir_url = "http://allmydata.org/TARGET"
1654 def _check2(statuscode, target):
1655 self.failUnlessEqual(statuscode, str(http.FOUND))
1656 self.failUnlessEqual(target, redir_url)
1657 d.addCallback(lambda res:
1658 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1661 t="check", repair="true",
1662 when_done=redir_url))
1663 d.addCallback(lambda res:
1664 self.POST(bar_url, t="check", return_to=redir_url))
1666 self.failUnless("Healthy :" in res)
1667 self.failUnless("Return to file" in res)
1668 self.failUnless(redir_url in res)
1669 d.addCallback(_check3)
1672 def test_POST_DIRURL_check(self):
1673 foo_url = self.public_url + "/foo/"
1674 d = self.POST(foo_url, t="check")
1676 self.failUnless("Healthy :" in res, res)
1677 d.addCallback(_check)
1678 redir_url = "http://allmydata.org/TARGET"
1679 def _check2(statuscode, target):
1680 self.failUnlessEqual(statuscode, str(http.FOUND))
1681 self.failUnlessEqual(target, redir_url)
1682 d.addCallback(lambda res:
1683 self.shouldRedirect2("test_POST_DIRURL_check",
1687 when_done=redir_url))
1688 d.addCallback(lambda res:
1689 self.POST(foo_url, t="check", return_to=redir_url))
1691 self.failUnless("Healthy :" in res, res)
1692 self.failUnless("Return to file/directory" in res)
1693 self.failUnless(redir_url in res)
1694 d.addCallback(_check3)
1696 d.addCallback(lambda res:
1697 self.POST(foo_url, t="check", output="JSON"))
1698 def _check_json(res):
1699 data = simplejson.loads(res)
1700 self.failUnless("storage-index" in data)
1701 self.failUnless(data["results"]["healthy"])
1702 d.addCallback(_check_json)
1706 def test_POST_DIRURL_check_and_repair(self):
1707 foo_url = self.public_url + "/foo/"
1708 d = self.POST(foo_url, t="check", repair="true")
1710 self.failUnless("Healthy :" in res, res)
1711 d.addCallback(_check)
1712 redir_url = "http://allmydata.org/TARGET"
1713 def _check2(statuscode, target):
1714 self.failUnlessEqual(statuscode, str(http.FOUND))
1715 self.failUnlessEqual(target, redir_url)
1716 d.addCallback(lambda res:
1717 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
1720 t="check", repair="true",
1721 when_done=redir_url))
1722 d.addCallback(lambda res:
1723 self.POST(foo_url, t="check", return_to=redir_url))
1725 self.failUnless("Healthy :" in res)
1726 self.failUnless("Return to file/directory" in res)
1727 self.failUnless(redir_url in res)
1728 d.addCallback(_check3)
1731 def wait_for_operation(self, ignored, ophandle):
1732 url = "/operations/" + ophandle
1733 url += "?t=status&output=JSON"
1736 data = simplejson.loads(res)
1737 if not data["finished"]:
1738 d = self.stall(delay=1.0)
1739 d.addCallback(self.wait_for_operation, ophandle)
1745 def get_operation_results(self, ignored, ophandle, output=None):
1746 url = "/operations/" + ophandle
1749 url += "&output=" + output
1752 if output and output.lower() == "json":
1753 return simplejson.loads(res)
1758 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1759 d = self.shouldFail2(error.Error,
1760 "test_POST_DIRURL_deepcheck_no_ophandle",
1762 "slow operation requires ophandle=",
1763 self.POST, self.public_url, t="start-deep-check")
1766 def test_POST_DIRURL_deepcheck(self):
1767 def _check_redirect(statuscode, target):
1768 self.failUnlessEqual(statuscode, str(http.FOUND))
1769 self.failUnless(target.endswith("/operations/123"))
1770 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1771 self.POST, self.public_url,
1772 t="start-deep-check", ophandle="123")
1773 d.addCallback(self.wait_for_operation, "123")
1774 def _check_json(data):
1775 self.failUnlessEqual(data["finished"], True)
1776 self.failUnlessEqual(data["count-objects-checked"], 8)
1777 self.failUnlessEqual(data["count-objects-healthy"], 8)
1778 d.addCallback(_check_json)
1779 d.addCallback(self.get_operation_results, "123", "html")
1780 def _check_html(res):
1781 self.failUnless("Objects Checked: <span>8</span>" in res)
1782 self.failUnless("Objects Healthy: <span>8</span>" in res)
1783 d.addCallback(_check_html)
1785 d.addCallback(lambda res:
1786 self.GET("/operations/123/"))
1787 d.addCallback(_check_html) # should be the same as without the slash
1789 d.addCallback(lambda res:
1790 self.shouldFail2(error.Error, "one", "404 Not Found",
1791 "No detailed results for SI bogus",
1792 self.GET, "/operations/123/bogus"))
1794 foo_si = self._foo_node.get_storage_index()
1795 foo_si_s = base32.b2a(foo_si)
1796 d.addCallback(lambda res:
1797 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1798 def _check_foo_json(res):
1799 data = simplejson.loads(res)
1800 self.failUnlessEqual(data["storage-index"], foo_si_s)
1801 self.failUnless(data["results"]["healthy"])
1802 d.addCallback(_check_foo_json)
1805 def test_POST_DIRURL_deepcheck_and_repair(self):
1806 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1807 ophandle="124", output="json", followRedirect=True)
1808 d.addCallback(self.wait_for_operation, "124")
1809 def _check_json(data):
1810 self.failUnlessEqual(data["finished"], True)
1811 self.failUnlessEqual(data["count-objects-checked"], 8)
1812 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1813 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1814 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1815 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1816 self.failUnlessEqual(data["count-repairs-successful"], 0)
1817 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1818 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1819 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1820 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1821 d.addCallback(_check_json)
1822 d.addCallback(self.get_operation_results, "124", "html")
1823 def _check_html(res):
1824 self.failUnless("Objects Checked: <span>8</span>" in res)
1826 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1827 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1828 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1830 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1831 self.failUnless("Repairs Successful: <span>0</span>" in res)
1832 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1834 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1835 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1836 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1837 d.addCallback(_check_html)
1840 def test_POST_FILEURL_bad_t(self):
1841 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1842 "POST to file: bad t=bogus",
1843 self.POST, self.public_url + "/foo/bar.txt",
def test_POST_mkdir(self): # return value?
    """POST t=mkdir with name= creates an empty child directory."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    # the new directory must be empty
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits for the whole chain
    return d
def test_POST_mkdir_2(self):
    """POST to child-URL?t=mkdir also creates an empty child directory."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdirs_2(self):
    """POST ?t=mkdir with a missing intermediate creates the whole path."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    # the intermediate 'bardir' is created on demand ...
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    # ... and the (empty) leaf directory lives inside it
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_no_parentdir_noredirect(self):
    """POST /uri?t=mkdir creates an unlinked dir and returns its cap."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # the response body must parse as a directory write-cap;
        # init_from_string raises if it does not
        uri.NewDirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    return d
1877 def test_POST_mkdir_no_parentdir_redirect(self):
1878 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1879 d.addBoth(self.shouldRedirect, None, statuscode='303')
1880 def _check_target(target):
1881 target = urllib.unquote(target)
1882 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1883 d.addCallback(_check_target)
def test_POST_noparent_bad(self):
    """POST /uri with an unknown t= value must yield 400 Bad Request."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
    # return the Deferred so trial waits for the error check
    return d
1893 def test_welcome_page_mkdir_button(self):
1894 # Fetch the welcome page.
1896 def _after_get_welcome_page(res):
1897 MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
1898 mo = MKDIR_BUTTON_RE.search(res)
1899 formaction = mo.group(1)
1901 formaname = mo.group(3)
1902 formavalue = mo.group(4)
1903 return (formaction, formt, formaname, formavalue)
1904 d.addCallback(_after_get_welcome_page)
1905 def _after_parse_form(res):
1906 (formaction, formt, formaname, formavalue) = res
1907 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
1908 d.addCallback(_after_parse_form)
1909 d.addBoth(self.shouldRedirect, None, statuscode='303')
1912 def test_POST_mkdir_replace(self): # return value?
1913 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1914 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1915 d.addCallback(self.failUnlessNodeKeysAre, [])
1918 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1919 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1920 d.addBoth(self.shouldFail, error.Error,
1921 "POST_mkdir_no_replace_queryarg",
1923 "There was already a child by that name, and you asked me "
1924 "to not replace it")
1925 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1926 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1929 def test_POST_mkdir_no_replace_field(self): # return value?
1930 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1932 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1934 "There was already a child by that name, and you asked me "
1935 "to not replace it")
1936 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1937 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1940 def test_POST_mkdir_whendone_field(self):
1941 d = self.POST(self.public_url + "/foo",
1942 t="mkdir", name="newdir", when_done="/THERE")
1943 d.addBoth(self.shouldRedirect, "/THERE")
1944 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1945 d.addCallback(self.failUnlessNodeKeysAre, [])
1948 def test_POST_mkdir_whendone_queryarg(self):
1949 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1950 t="mkdir", name="newdir")
1951 d.addBoth(self.shouldRedirect, "/THERE")
1952 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1953 d.addCallback(self.failUnlessNodeKeysAre, [])
1956 def test_POST_bad_t(self):
# POSTing an unknown t= value to a directory must yield 400 Bad Request.
1957 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1958 "POST to a directory with bad t=BOGUS",
1959 self.POST, self.public_url + "/foo", t="BOGUS")
1962 def test_POST_set_children(self):
1963 contents9, n9, newuri9 = self.makefile(9)
1964 contents10, n10, newuri10 = self.makefile(10)
1965 contents11, n11, newuri11 = self.makefile(11)
1968 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1971 "ctime": 1002777696.7564139,
1972 "mtime": 1002777696.7564139
1975 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1978 "ctime": 1002777696.7564139,
1979 "mtime": 1002777696.7564139
1982 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1985 "ctime": 1002777696.7564139,
1986 "mtime": 1002777696.7564139
1989 }""" % (newuri9, newuri10, newuri11)
1991 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
1993 d = client.getPage(url, method="POST", postdata=reqbody)
1995 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
1996 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
1997 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
1999 d.addCallback(_then)
2000 d.addErrback(self.dump_error)
2003 def test_POST_put_uri(self):
2004 contents, n, newuri = self.makefile(8)
2005 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2006 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2007 d.addCallback(lambda res:
2008 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2012 def test_POST_put_uri_replace(self):
2013 contents, n, newuri = self.makefile(8)
2014 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2015 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2016 d.addCallback(lambda res:
2017 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2021 def test_POST_put_uri_no_replace_queryarg(self):
2022 contents, n, newuri = self.makefile(8)
2023 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2024 name="bar.txt", uri=newuri)
2025 d.addBoth(self.shouldFail, error.Error,
2026 "POST_put_uri_no_replace_queryarg",
2028 "There was already a child by that name, and you asked me "
2029 "to not replace it")
2030 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2031 d.addCallback(self.failUnlessIsBarDotTxt)
2034 def test_POST_put_uri_no_replace_field(self):
2035 contents, n, newuri = self.makefile(8)
2036 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2037 name="bar.txt", uri=newuri)
2038 d.addBoth(self.shouldFail, error.Error,
2039 "POST_put_uri_no_replace_field",
2041 "There was already a child by that name, and you asked me "
2042 "to not replace it")
2043 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2044 d.addCallback(self.failUnlessIsBarDotTxt)
2047 def test_POST_delete(self):
# t=delete removes the named child; afterwards the directory listing
# must no longer contain it.
2048 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2049 d.addCallback(lambda res: self._foo_node.list())
2050 def _check(children):
2051 self.failIf(u"bar.txt" in children)
2052 d.addCallback(_check)
2055 def test_POST_rename_file(self):
# rename bar.txt to wibble.txt: the old name must disappear, the new
# name must appear, and both the plain and t=json GETs of the new name
# must still serve the original bar.txt contents.
2056 d = self.POST(self.public_url + "/foo", t="rename",
2057 from_name="bar.txt", to_name='wibble.txt')
2058 d.addCallback(lambda res:
2059 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2060 d.addCallback(lambda res:
2061 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2062 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2063 d.addCallback(self.failUnlessIsBarDotTxt)
2064 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2065 d.addCallback(self.failUnlessIsBarJSON)
2068 def test_POST_rename_file_redundant(self):
# renaming a file to its own name is a no-op: the child must still
# exist and serve its original contents (plain and t=json).
2069 d = self.POST(self.public_url + "/foo", t="rename",
2070 from_name="bar.txt", to_name='bar.txt')
2071 d.addCallback(lambda res:
2072 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2073 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2074 d.addCallback(self.failUnlessIsBarDotTxt)
2075 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2076 d.addCallback(self.failUnlessIsBarJSON)
2079 def test_POST_rename_file_replace(self):
2080 # rename a file and replace a directory with it
# without replace=false, the rename silently clobbers the existing
# 'empty' directory; the new child must serve bar.txt's contents.
2081 d = self.POST(self.public_url + "/foo", t="rename",
2082 from_name="bar.txt", to_name='empty')
2083 d.addCallback(lambda res:
2084 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2085 d.addCallback(lambda res:
2086 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2087 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2088 d.addCallback(self.failUnlessIsBarDotTxt)
2089 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2090 d.addCallback(self.failUnlessIsBarJSON)
2093 def test_POST_rename_file_no_replace_queryarg(self):
2094 # rename a file and replace a directory with it
2095 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2096 from_name="bar.txt", to_name='empty')
2097 d.addBoth(self.shouldFail, error.Error,
2098 "POST_rename_file_no_replace_queryarg",
2100 "There was already a child by that name, and you asked me "
2101 "to not replace it")
2102 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2103 d.addCallback(self.failUnlessIsEmptyJSON)
2106 def test_POST_rename_file_no_replace_field(self):
2107 # rename a file and replace a directory with it
2108 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2109 from_name="bar.txt", to_name='empty')
2110 d.addBoth(self.shouldFail, error.Error,
2111 "POST_rename_file_no_replace_field",
2113 "There was already a child by that name, and you asked me "
2114 "to not replace it")
2115 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2116 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json rendering of an empty dirnode."""
    parsed = simplejson.loads(res)
    nodetype, info = parsed[0], parsed[1]
    self.failUnlessEqual(nodetype, "dirnode", parsed)
    # an empty directory renders with a zero-length children table
    self.failUnlessEqual(len(info["children"]), 0)
2124 def test_POST_rename_file_slash_fail(self):
2125 d = self.POST(self.public_url + "/foo", t="rename",
2126 from_name="bar.txt", to_name='kirk/spock.txt')
2127 d.addBoth(self.shouldFail, error.Error,
2128 "test_POST_rename_file_slash_fail",
2130 "to_name= may not contain a slash",
2132 d.addCallback(lambda res:
2133 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2136 def test_POST_rename_dir(self):
# t=rename works on directories too: 'foo' becomes 'plunk' under the
# public root, and the renamed directory still lists the same children.
2137 d = self.POST(self.public_url, t="rename",
2138 from_name="foo", to_name='plunk')
2139 d.addCallback(lambda res:
2140 self.failIfNodeHasChild(self.public_root, u"foo"))
2141 d.addCallback(lambda res:
2142 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2143 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2144 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """Assert that *res* is a redirect Failure (error.PageRedirect).

    If target is not None then the redirection has to go to
    webish_url + target. If statuscode is not None then the redirection
    has to be accomplished with that HTTP status code. Returns the
    redirect's Location so further callbacks can inspect it.
    """
    if not isinstance(res, failure.Failure):
        # Use a real conditional expression instead of the old
        # 'cond and a or b' idiom, which silently misbehaves whenever
        # the middle operand is falsy.
        to_where = "somewhere" if target is None else ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        realtarget = self.webish_url + target
        self.failUnlessEqual(res.value.location, realtarget,
                             "%s: wrong target" % which)
    return res.value.location
2167 def test_GET_URI_form(self):
2168 base = "/uri?uri=%s" % self._bar_txt_uri
2169 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2170 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2172 d.addBoth(self.shouldRedirect, targetbase)
2173 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2174 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2175 d.addCallback(lambda res: self.GET(base+"&t=json"))
2176 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2177 d.addCallback(self.log, "about to get file by uri")
2178 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2179 d.addCallback(self.failUnlessIsBarDotTxt)
2180 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2181 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2182 followRedirect=True))
2183 d.addCallback(self.failUnlessIsFooJSON)
2184 d.addCallback(self.log, "got dir by uri")
2188 def test_GET_URI_form_bad(self):
2189 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2190 "400 Bad Request", "GET /uri requires uri=",
2194 def test_GET_rename_form(self):
2195 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2196 followRedirect=True)
2198 self.failUnless('name="when_done" value="."' in res, res)
2199 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2200 d.addCallback(_check)
2203 def log(self, res, msg):
2204 #print "MSG: %s RES: %s" % (msg, res)
2208 def test_GET_URI_URL(self):
2209 base = "/uri/%s" % self._bar_txt_uri
2211 d.addCallback(self.failUnlessIsBarDotTxt)
2212 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2213 d.addCallback(self.failUnlessIsBarDotTxt)
2214 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2215 d.addCallback(self.failUnlessIsBarDotTxt)
2218 def test_GET_URI_URL_dir(self):
2219 base = "/uri/%s?t=json" % self._foo_uri
2221 d.addCallback(self.failUnlessIsFooJSON)
2224 def test_GET_URI_URL_missing(self):
2225 base = "/uri/%s" % self._bad_file_uri
2226 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2227 http.GONE, None, "NotEnoughSharesError",
2229 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2230 # here? we must arrange for a download to fail after target.open()
2231 # has been called, and then inspect the response to see that it is
2232 # shorter than we expected.
2235 def test_PUT_DIRURL_uri(self):
2236 d = self.s.create_empty_dirnode()
2238 new_uri = dn.get_uri()
2239 # replace /foo with a new (empty) directory
2240 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2241 d.addCallback(lambda res:
2242 self.failUnlessEqual(res.strip(), new_uri))
2243 d.addCallback(lambda res:
2244 self.failUnlessChildURIIs(self.public_root,
2248 d.addCallback(_made_dir)
2251 def test_PUT_DIRURL_uri_noreplace(self):
2252 d = self.s.create_empty_dirnode()
2254 new_uri = dn.get_uri()
2255 # replace /foo with a new (empty) directory, but ask that
2256 # replace=false, so it should fail
2257 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2258 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2260 self.public_url + "/foo?t=uri&replace=false",
2262 d.addCallback(lambda res:
2263 self.failUnlessChildURIIs(self.public_root,
2267 d.addCallback(_made_dir)
2270 def test_PUT_DIRURL_bad_t(self):
2271 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2272 "400 Bad Request", "PUT to a directory",
2273 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2274 d.addCallback(lambda res:
2275 self.failUnlessChildURIIs(self.public_root,
2280 def test_PUT_NEWFILEURL_uri(self):
2281 contents, n, new_uri = self.makefile(8)
2282 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2283 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2284 d.addCallback(lambda res:
2285 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2289 def test_PUT_NEWFILEURL_uri_replace(self):
2290 contents, n, new_uri = self.makefile(8)
2291 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2292 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2293 d.addCallback(lambda res:
2294 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2298 def test_PUT_NEWFILEURL_uri_no_replace(self):
2299 contents, n, new_uri = self.makefile(8)
2300 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2301 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2303 "There was already a child by that name, and you asked me "
2304 "to not replace it")
2307 def test_PUT_NEWFILE_URI(self):
2308 file_contents = "New file contents here\n"
2309 d = self.PUT("/uri", file_contents)
2311 assert isinstance(uri, str), uri
2312 self.failUnless(uri in FakeCHKFileNode.all_contents)
2313 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2315 return self.GET("/uri/%s" % uri)
2316 d.addCallback(_check)
2318 self.failUnlessEqual(res, file_contents)
2319 d.addCallback(_check2)
2322 def test_PUT_NEWFILE_URI_not_mutable(self):
2323 file_contents = "New file contents here\n"
2324 d = self.PUT("/uri?mutable=false", file_contents)
2326 assert isinstance(uri, str), uri
2327 self.failUnless(uri in FakeCHKFileNode.all_contents)
2328 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2330 return self.GET("/uri/%s" % uri)
2331 d.addCallback(_check)
2333 self.failUnlessEqual(res, file_contents)
2334 d.addCallback(_check2)
2337 def test_PUT_NEWFILE_URI_only_PUT(self):
2338 d = self.PUT("/uri?t=bogus", "")
2339 d.addBoth(self.shouldFail, error.Error,
2340 "PUT_NEWFILE_URI_only_PUT",
2342 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2345 def test_PUT_NEWFILE_URI_mutable(self):
2346 file_contents = "New file contents here\n"
2347 d = self.PUT("/uri?mutable=true", file_contents)
2348 def _check_mutable(uri):
2351 self.failUnless(IMutableFileURI.providedBy(u))
2352 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2353 n = self.s.create_node_from_uri(uri)
2354 return n.download_best_version()
2355 d.addCallback(_check_mutable)
2356 def _check2_mutable(data):
2357 self.failUnlessEqual(data, file_contents)
2358 d.addCallback(_check2_mutable)
2362 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2363 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2365 return self.GET("/uri/%s" % uri)
2366 d.addCallback(_check)
2368 self.failUnlessEqual(res, file_contents)
2369 d.addCallback(_check2)
2372 def test_PUT_mkdir(self):
2373 d = self.PUT("/uri?t=mkdir", "")
2375 n = self.s.create_node_from_uri(uri.strip())
2376 d2 = self.failUnlessNodeKeysAre(n, [])
2377 d2.addCallback(lambda res:
2378 self.GET("/uri/%s?t=json" % uri))
2380 d.addCallback(_check)
2381 d.addCallback(self.failUnlessIsEmptyJSON)
2384 def test_POST_check(self):
2385 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2387 # this returns a string form of the results, which are probably
2388 # None since we're using fake filenodes.
2389 # TODO: verify that the check actually happened, by changing
2390 # FakeCHKFileNode to count how many times .check() has been
2393 d.addCallback(_done)
2396 def test_bad_method(self):
# an unrecognized HTTP method on a file URL must be answered with
# 501 Not Implemented.
2397 url = self.webish_url + self.public_url + "/foo/bar.txt"
2398 d = self.shouldHTTPError("test_bad_method",
2399 501, "Not Implemented",
2400 "I don't know how to treat a BOGUS request.",
2401 client.getPage, url, method="BOGUS")
2404 def test_short_url(self):
# DELETE is not a supported method on the bare /uri resource, so it
# must produce 501 Not Implemented.
2405 url = self.webish_url + "/uri"
2406 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2407 "I don't know how to treat a DELETE request.",
2408 client.getPage, url, method="DELETE")
2411 def test_ophandle_bad(self):
# asking for the status of an operation handle that was never created
# must produce 404 with an 'unknown/expired handle' message.
2412 url = self.webish_url + "/operations/bogus?t=status"
2413 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2414 "unknown/expired handle 'bogus'",
2415 client.getPage, url)
2418 def test_ophandle_cancel(self):
2419 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2420 followRedirect=True)
2421 d.addCallback(lambda ignored:
2422 self.GET("/operations/128?t=status&output=JSON"))
2424 data = simplejson.loads(res)
2425 self.failUnless("finished" in data, res)
2426 monitor = self.ws.root.child_operations.handles["128"][0]
2427 d = self.POST("/operations/128?t=cancel&output=JSON")
2429 data = simplejson.loads(res)
2430 self.failUnless("finished" in data, res)
2431 # t=cancel causes the handle to be forgotten
2432 self.failUnless(monitor.is_cancelled())
2433 d.addCallback(_check2)
2435 d.addCallback(_check1)
2436 d.addCallback(lambda ignored:
2437 self.shouldHTTPError("test_ophandle_cancel",
2438 404, "404 Not Found",
2439 "unknown/expired handle '128'",
2441 "/operations/128?t=status&output=JSON"))
2444 def test_ophandle_retainfor(self):
2445 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2446 followRedirect=True)
2447 d.addCallback(lambda ignored:
2448 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2450 data = simplejson.loads(res)
2451 self.failUnless("finished" in data, res)
2452 d.addCallback(_check1)
2453 # the retain-for=0 will cause the handle to be expired very soon
2454 d.addCallback(self.stall, 2.0)
2455 d.addCallback(lambda ignored:
2456 self.shouldHTTPError("test_ophandle_retainfor",
2457 404, "404 Not Found",
2458 "unknown/expired handle '129'",
2460 "/operations/129?t=status&output=JSON"))
2463 def test_ophandle_release_after_complete(self):
2464 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2465 followRedirect=True)
2466 d.addCallback(self.wait_for_operation, "130")
2467 d.addCallback(lambda ignored:
2468 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2469 # the release-after-complete=true will cause the handle to be expired
2470 d.addCallback(lambda ignored:
2471 self.shouldHTTPError("test_ophandle_release_after_complete",
2472 404, "404 Not Found",
2473 "unknown/expired handle '130'",
2475 "/operations/130?t=status&output=JSON"))
2478 def test_incident(self):
2479 d = self.POST("/report_incident", details="eek")
2481 self.failUnless("Thank you for your report!" in res, res)
2482 d.addCallback(_done)
2485 def test_static(self):
2486 webdir = os.path.join(self.staticdir, "subdir")
2487 fileutil.make_dirs(webdir)
2488 f = open(os.path.join(webdir, "hello.txt"), "wb")
2492 d = self.GET("/static/subdir/hello.txt")
2494 self.failUnlessEqual(res, "hello")
2495 d.addCallback(_check)
2499 class Util(unittest.TestCase):
def test_abbreviate_time(self):
    """abbreviate_time picks sensible units from seconds down to us."""
    # (seconds, expected rendering) pairs, checked in order
    cases = [
        (None, ""),
        (1.234, "1.23s"),
        (0.123, "123ms"),
        (0.00123, "1.2ms"),
        (0.000123, "123us"),
    ]
    for seconds, expected in cases:
        self.failUnlessEqual(common.abbreviate_time(seconds), expected)
def test_abbreviate_rate(self):
    """abbreviate_rate renders bytes-per-second with metric prefixes."""
    # (bytes/sec, expected rendering) pairs, checked in order
    cases = [
        (None, ""),
        (1234000, "1.23MBps"),
        (12340, "12.3kBps"),
        (123, "123Bps"),
    ]
    for rate, expected in cases:
        self.failUnlessEqual(common.abbreviate_rate(rate), expected)
def test_abbreviate_size(self):
    """abbreviate_size renders byte counts with decimal (SI) prefixes."""
    # (size in bytes, expected rendering) pairs, checked in order
    cases = [
        (None, ""),
        (1.23 * 1000 * 1000 * 1000, "1.23GB"),
        (1.23 * 1000 * 1000, "1.23MB"),
        (1230, "1.2kB"),
        (123, "123B"),
    ]
    for size, expected in cases:
        self.failUnlessEqual(common.abbreviate_size(size), expected)
2520 def test_plural(self):
2522 return "%d second%s" % (s, status.plural(s))
2523 self.failUnlessEqual(convert(0), "0 seconds")
2524 self.failUnlessEqual(convert(1), "1 second")
2525 self.failUnlessEqual(convert(2), "2 seconds")
2527 return "has share%s: %s" % (status.plural(s), ",".join(s))
2528 self.failUnlessEqual(convert2([]), "has shares: ")
2529 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2530 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2533 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """POST to the stashed fileurl for *which* with *args* as the query
    string. *ign* is the ignored result of the preceding Deferred
    callback, so this can be used directly with addCallback."""
    return self.GET("%s?%s" % (self.fileurls[which], args),
                    method="POST", clientnum=clientnum)
2540 def test_filecheck(self):
2541 self.basedir = "web/Grid/filecheck"
2543 c0 = self.g.clients[0]
2546 d = c0.upload(upload.Data(DATA, convergence=""))
2547 def _stash_uri(ur, which):
2548 self.uris[which] = ur.uri
2549 d.addCallback(_stash_uri, "good")
2550 d.addCallback(lambda ign:
2551 c0.upload(upload.Data(DATA+"1", convergence="")))
2552 d.addCallback(_stash_uri, "sick")
2553 d.addCallback(lambda ign:
2554 c0.upload(upload.Data(DATA+"2", convergence="")))
2555 d.addCallback(_stash_uri, "dead")
2556 def _stash_mutable_uri(n, which):
2557 self.uris[which] = n.get_uri()
2558 assert isinstance(self.uris[which], str)
2559 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2560 d.addCallback(_stash_mutable_uri, "corrupt")
2561 d.addCallback(lambda ign:
2562 c0.upload(upload.Data("literal", convergence="")))
2563 d.addCallback(_stash_uri, "small")
2565 def _compute_fileurls(ignored):
2567 for which in self.uris:
2568 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2569 d.addCallback(_compute_fileurls)
2571 def _clobber_shares(ignored):
2572 good_shares = self.find_shares(self.uris["good"])
2573 self.failUnlessEqual(len(good_shares), 10)
2574 sick_shares = self.find_shares(self.uris["sick"])
2575 os.unlink(sick_shares[0][2])
2576 dead_shares = self.find_shares(self.uris["dead"])
2577 for i in range(1, 10):
2578 os.unlink(dead_shares[i][2])
2579 c_shares = self.find_shares(self.uris["corrupt"])
2580 cso = CorruptShareOptions()
2581 cso.stdout = StringIO()
2582 cso.parseOptions([c_shares[0][2]])
2584 d.addCallback(_clobber_shares)
2586 d.addCallback(self.CHECK, "good", "t=check")
2587 def _got_html_good(res):
2588 self.failUnless("Healthy" in res, res)
2589 self.failIf("Not Healthy" in res, res)
2590 d.addCallback(_got_html_good)
2591 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2592 def _got_html_good_return_to(res):
2593 self.failUnless("Healthy" in res, res)
2594 self.failIf("Not Healthy" in res, res)
2595 self.failUnless('<a href="somewhere">Return to file'
2597 d.addCallback(_got_html_good_return_to)
2598 d.addCallback(self.CHECK, "good", "t=check&output=json")
2599 def _got_json_good(res):
2600 r = simplejson.loads(res)
2601 self.failUnlessEqual(r["summary"], "Healthy")
2602 self.failUnless(r["results"]["healthy"])
2603 self.failIf(r["results"]["needs-rebalancing"])
2604 self.failUnless(r["results"]["recoverable"])
2605 d.addCallback(_got_json_good)
2607 d.addCallback(self.CHECK, "small", "t=check")
2608 def _got_html_small(res):
2609 self.failUnless("Literal files are always healthy" in res, res)
2610 self.failIf("Not Healthy" in res, res)
2611 d.addCallback(_got_html_small)
2612 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2613 def _got_html_small_return_to(res):
2614 self.failUnless("Literal files are always healthy" in res, res)
2615 self.failIf("Not Healthy" in res, res)
2616 self.failUnless('<a href="somewhere">Return to file'
2618 d.addCallback(_got_html_small_return_to)
2619 d.addCallback(self.CHECK, "small", "t=check&output=json")
2620 def _got_json_small(res):
2621 r = simplejson.loads(res)
2622 self.failUnlessEqual(r["storage-index"], "")
2623 self.failUnless(r["results"]["healthy"])
2624 d.addCallback(_got_json_small)
2626 d.addCallback(self.CHECK, "sick", "t=check")
2627 def _got_html_sick(res):
2628 self.failUnless("Not Healthy" in res, res)
2629 d.addCallback(_got_html_sick)
2630 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2631 def _got_json_sick(res):
2632 r = simplejson.loads(res)
2633 self.failUnlessEqual(r["summary"],
2634 "Not Healthy: 9 shares (enc 3-of-10)")
2635 self.failIf(r["results"]["healthy"])
2636 self.failIf(r["results"]["needs-rebalancing"])
2637 self.failUnless(r["results"]["recoverable"])
2638 d.addCallback(_got_json_sick)
2640 d.addCallback(self.CHECK, "dead", "t=check")
2641 def _got_html_dead(res):
2642 self.failUnless("Not Healthy" in res, res)
2643 d.addCallback(_got_html_dead)
2644 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2645 def _got_json_dead(res):
2646 r = simplejson.loads(res)
2647 self.failUnlessEqual(r["summary"],
2648 "Not Healthy: 1 shares (enc 3-of-10)")
2649 self.failIf(r["results"]["healthy"])
2650 self.failIf(r["results"]["needs-rebalancing"])
2651 self.failIf(r["results"]["recoverable"])
2652 d.addCallback(_got_json_dead)
2654 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2655 def _got_html_corrupt(res):
2656 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2657 d.addCallback(_got_html_corrupt)
2658 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2659 def _got_json_corrupt(res):
2660 r = simplejson.loads(res)
2661 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2663 self.failIf(r["results"]["healthy"])
2664 self.failUnless(r["results"]["recoverable"])
2665 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2666 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2667 d.addCallback(_got_json_corrupt)
2669 d.addErrback(self.explain_web_error)
2672 def test_repair_html(self):
2673 self.basedir = "web/Grid/repair_html"
2675 c0 = self.g.clients[0]
2678 d = c0.upload(upload.Data(DATA, convergence=""))
2679 def _stash_uri(ur, which):
2680 self.uris[which] = ur.uri
2681 d.addCallback(_stash_uri, "good")
2682 d.addCallback(lambda ign:
2683 c0.upload(upload.Data(DATA+"1", convergence="")))
2684 d.addCallback(_stash_uri, "sick")
2685 d.addCallback(lambda ign:
2686 c0.upload(upload.Data(DATA+"2", convergence="")))
2687 d.addCallback(_stash_uri, "dead")
2688 def _stash_mutable_uri(n, which):
2689 self.uris[which] = n.get_uri()
2690 assert isinstance(self.uris[which], str)
2691 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2692 d.addCallback(_stash_mutable_uri, "corrupt")
2694 def _compute_fileurls(ignored):
2696 for which in self.uris:
2697 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2698 d.addCallback(_compute_fileurls)
2700 def _clobber_shares(ignored):
2701 good_shares = self.find_shares(self.uris["good"])
2702 self.failUnlessEqual(len(good_shares), 10)
2703 sick_shares = self.find_shares(self.uris["sick"])
2704 os.unlink(sick_shares[0][2])
2705 dead_shares = self.find_shares(self.uris["dead"])
2706 for i in range(1, 10):
2707 os.unlink(dead_shares[i][2])
2708 c_shares = self.find_shares(self.uris["corrupt"])
2709 cso = CorruptShareOptions()
2710 cso.stdout = StringIO()
2711 cso.parseOptions([c_shares[0][2]])
2713 d.addCallback(_clobber_shares)
2715 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2716 def _got_html_good(res):
2717 self.failUnless("Healthy" in res, res)
2718 self.failIf("Not Healthy" in res, res)
2719 self.failUnless("No repair necessary" in res, res)
2720 d.addCallback(_got_html_good)
2722 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2723 def _got_html_sick(res):
2724 self.failUnless("Healthy : healthy" in res, res)
2725 self.failIf("Not Healthy" in res, res)
2726 self.failUnless("Repair successful" in res, res)
2727 d.addCallback(_got_html_sick)
2729 # repair of a dead file will fail, of course, but it isn't yet
2730 # clear how this should be reported. Right now it shows up as
2733 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2734 #def _got_html_dead(res):
2736 # self.failUnless("Healthy : healthy" in res, res)
2737 # self.failIf("Not Healthy" in res, res)
2738 # self.failUnless("No repair necessary" in res, res)
2739 #d.addCallback(_got_html_dead)
2741 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2742 def _got_html_corrupt(res):
2743 self.failUnless("Healthy : Healthy" in res, res)
2744 self.failIf("Not Healthy" in res, res)
2745 self.failUnless("Repair successful" in res, res)
2746 d.addCallback(_got_html_corrupt)
2748 d.addErrback(self.explain_web_error)
2751 def test_repair_json(self):
2752 self.basedir = "web/Grid/repair_json"
2754 c0 = self.g.clients[0]
2757 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2758 def _stash_uri(ur, which):
2759 self.uris[which] = ur.uri
2760 d.addCallback(_stash_uri, "sick")
2762 def _compute_fileurls(ignored):
2764 for which in self.uris:
2765 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2766 d.addCallback(_compute_fileurls)
2768 def _clobber_shares(ignored):
2769 sick_shares = self.find_shares(self.uris["sick"])
2770 os.unlink(sick_shares[0][2])
2771 d.addCallback(_clobber_shares)
2773 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2774 def _got_json_sick(res):
2775 r = simplejson.loads(res)
2776 self.failUnlessEqual(r["repair-attempted"], True)
2777 self.failUnlessEqual(r["repair-successful"], True)
2778 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2779 "Not Healthy: 9 shares (enc 3-of-10)")
2780 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2781 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2782 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2783 d.addCallback(_got_json_sick)
2785 d.addErrback(self.explain_web_error)
# Exercise "?t=stream-deep-check" and "?t=stream-manifest" over a small
# directory tree (root + good/small/sick children), then make a subdirectory
# unrecoverable and verify that the streaming APIs report the failure via an
# "ERROR:" line followed by a traceback instead of JSON.
# NOTE(review): many original lines are elided from this view (grid setup,
# self.rootnode assignment, the _done callback's 'def' line, 'return fn' in
# _stash_uri, closing parens of multi-line calls, 'return d', etc.) --
# comments describe only the visible lines.
2788 def test_deep_check(self):
2789 self.basedir = "web/Grid/deep_check"
2791 c0 = self.g.clients[0]
2795 d = c0.create_empty_dirnode()
# remember the root's webapi URL, then add an immutable child "good"
2796 def _stash_root_and_create_file(n):
2798 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2799 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2800 d.addCallback(_stash_root_and_create_file)
2801 def _stash_uri(fn, which):
2802 self.uris[which] = fn.get_uri()
2804 d.addCallback(_stash_uri, "good")
# "small" is short enough to become a literal (LIT) file
2805 d.addCallback(lambda ign:
2806 self.rootnode.add_file(u"small",
2807 upload.Data("literal",
2809 d.addCallback(_stash_uri, "small")
2810 d.addCallback(lambda ign:
2811 self.rootnode.add_file(u"sick",
2812 upload.Data(DATA+"1",
2814 d.addCallback(_stash_uri, "sick")
# drop two of "sick"'s shares: still recoverable (3-of-10), but not healthy
2816 def _clobber_shares(ignored):
2817 self.delete_shares_numbered(self.uris["sick"], [0,1])
2818 d.addCallback(_clobber_shares)
2825 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# each line of the streamed body is an independent JSON unit
2827 units = [simplejson.loads(line)
2828 for line in res.splitlines()
# 4 tree nodes (root, good, small, sick) plus the trailing stats unit
2830 self.failUnlessEqual(len(units), 4+1)
2831 # should be parent-first
2833 self.failUnlessEqual(u0["path"], [])
2834 self.failUnlessEqual(u0["type"], "directory")
2835 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2836 u0cr = u0["check-results"]
2837 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
# locate the "good" file's unit and confirm it is fully healthy
2839 ugood = [u for u in units
2840 if u["type"] == "file" and u["path"] == [u"good"]][0]
2841 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2842 ugoodcr = ugood["check-results"]
2843 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
2846 self.failUnlessEqual(stats["type"], "stats")
2848 self.failUnlessEqual(s["count-immutable-files"], 2)
2849 self.failUnlessEqual(s["count-literal-files"], 1)
2850 self.failUnlessEqual(s["count-directories"], 1)
2851 d.addCallback(_done)
2853 # now add root/subdir and root/subdir/grandchild, then make subdir
2854 # unrecoverable, then see what happens
2856 d.addCallback(lambda ign:
2857 self.rootnode.create_empty_directory(u"subdir"))
2858 d.addCallback(_stash_uri, "subdir")
2859 d.addCallback(lambda subdir_node:
2860 subdir_node.add_file(u"grandchild",
2861 upload.Data(DATA+"2",
2863 d.addCallback(_stash_uri, "grandchild")
# delete enough of subdir's shares to make it unrecoverable
2865 d.addCallback(lambda ign:
2866 self.delete_shares_numbered(self.uris["subdir"],
2873 # root/subdir [unrecoverable]
2874 # root/subdir/grandchild
2876 # how should a streaming-JSON API indicate fatal error?
2877 # answer: emit ERROR: instead of a JSON string
2879 d.addCallback(self.CHECK, "root", "t=stream-manifest")
# the manifest stream must stop at the broken subdir with an ERROR: line
# followed by a (multi-line) traceback; everything before it is valid JSON
2880 def _check_broken_manifest(res):
2881 lines = res.splitlines()
2883 for (i,line) in enumerate(lines)
2884 if line.startswith("ERROR:")]
2886 self.fail("no ERROR: in output: %s" % (res,))
2887 first_error = error_lines[0]
2888 error_line = lines[first_error]
2889 error_msg = lines[first_error+1:]
2890 error_msg_s = "\n".join(error_msg) + "\n"
2891 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2893 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2894 units = [simplejson.loads(line) for line in lines[:first_error]]
2895 self.failUnlessEqual(len(units), 5) # includes subdir
2896 last_unit = units[-1]
2897 self.failUnlessEqual(last_unit["path"], ["subdir"])
2898 d.addCallback(_check_broken_manifest)
2900 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# same ERROR:-plus-traceback contract for the deep-check stream; the last
# JSON unit (subdir itself) must report zero recoverable versions
2901 def _check_broken_deepcheck(res):
2902 lines = res.splitlines()
2904 for (i,line) in enumerate(lines)
2905 if line.startswith("ERROR:")]
2907 self.fail("no ERROR: in output: %s" % (res,))
2908 first_error = error_lines[0]
2909 error_line = lines[first_error]
2910 error_msg = lines[first_error+1:]
2911 error_msg_s = "\n".join(error_msg) + "\n"
2912 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2914 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2915 units = [simplejson.loads(line) for line in lines[:first_error]]
2916 self.failUnlessEqual(len(units), 5) # includes subdir
2917 last_unit = units[-1]
2918 self.failUnlessEqual(last_unit["path"], ["subdir"])
2919 r = last_unit["check-results"]["results"]
2920 self.failUnlessEqual(r["count-recoverable-versions"], 0)
2921 self.failUnlessEqual(r["count-shares-good"], 1)
2922 self.failUnlessEqual(r["recoverable"], False)
2923 d.addCallback(_check_broken_deepcheck)
2925 d.addErrback(self.explain_web_error)
2928 def test_deep_check_and_repair(self):
2929 self.basedir = "web/Grid/deep_check_and_repair"
2931 c0 = self.g.clients[0]
2935 d = c0.create_empty_dirnode()
2936 def _stash_root_and_create_file(n):
2938 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2939 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2940 d.addCallback(_stash_root_and_create_file)
2941 def _stash_uri(fn, which):
2942 self.uris[which] = fn.get_uri()
2943 d.addCallback(_stash_uri, "good")
2944 d.addCallback(lambda ign:
2945 self.rootnode.add_file(u"small",
2946 upload.Data("literal",
2948 d.addCallback(_stash_uri, "small")
2949 d.addCallback(lambda ign:
2950 self.rootnode.add_file(u"sick",
2951 upload.Data(DATA+"1",
2953 d.addCallback(_stash_uri, "sick")
2954 #d.addCallback(lambda ign:
2955 # self.rootnode.add_file(u"dead",
2956 # upload.Data(DATA+"2",
2958 #d.addCallback(_stash_uri, "dead")
2960 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2961 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2962 #d.addCallback(_stash_uri, "corrupt")
2964 def _clobber_shares(ignored):
2965 good_shares = self.find_shares(self.uris["good"])
2966 self.failUnlessEqual(len(good_shares), 10)
2967 sick_shares = self.find_shares(self.uris["sick"])
2968 os.unlink(sick_shares[0][2])
2969 #dead_shares = self.find_shares(self.uris["dead"])
2970 #for i in range(1, 10):
2971 # os.unlink(dead_shares[i][2])
2973 #c_shares = self.find_shares(self.uris["corrupt"])
2974 #cso = CorruptShareOptions()
2975 #cso.stdout = StringIO()
2976 #cso.parseOptions([c_shares[0][2]])
2978 d.addCallback(_clobber_shares)
2981 # root/good CHK, 10 shares
2983 # root/sick CHK, 9 shares
2985 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
2987 units = [simplejson.loads(line)
2988 for line in res.splitlines()
2990 self.failUnlessEqual(len(units), 4+1)
2991 # should be parent-first
2993 self.failUnlessEqual(u0["path"], [])
2994 self.failUnlessEqual(u0["type"], "directory")
2995 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2996 u0crr = u0["check-and-repair-results"]
2997 self.failUnlessEqual(u0crr["repair-attempted"], False)
2998 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3000 ugood = [u for u in units
3001 if u["type"] == "file" and u["path"] == [u"good"]][0]
3002 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3003 ugoodcrr = ugood["check-and-repair-results"]
3004 self.failUnlessEqual(u0crr["repair-attempted"], False)
3005 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3007 usick = [u for u in units
3008 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3009 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3010 usickcrr = usick["check-and-repair-results"]
3011 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3012 self.failUnlessEqual(usickcrr["repair-successful"], True)
3013 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3014 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3017 self.failUnlessEqual(stats["type"], "stats")
3019 self.failUnlessEqual(s["count-immutable-files"], 2)
3020 self.failUnlessEqual(s["count-literal-files"], 1)
3021 self.failUnlessEqual(s["count-directories"], 1)
3022 d.addCallback(_done)
3024 d.addErrback(self.explain_web_error)
# Helper: for the stashed URI named `which`, count the leases on every one
# of its share files and collect (share-filename, lease-count) pairs.
# NOTE(review): two lines are elided from this view -- presumably the
# initialization of `lease_counts` (line 3030) and a trailing
# 'return lease_counts' -- confirm against the full source.
3027 def _count_leases(self, ignored, which):
3028 u = self.uris[which]
3029 shares = self.find_shares(u)
# each find_shares() entry is (shnum, serverid, share-filename)
3031 for shnum, serverid, fn in shares:
3032 sf = get_share_file(fn)
3033 num_leases = len(list(sf.get_leases()))
3034 lease_counts.append( (fn, num_leases) )
3037 def _assert_leasecount(self, lease_counts, expected):
3038 for (fn, num_leases) in lease_counts:
3039 if num_leases != expected:
3040 self.fail("expected %d leases, have %d, on %s" %
3041 (expected, num_leases, fn))
# Exercise "?t=check&add-lease=true": a re-check by the original client only
# renews its existing lease (count stays 1), a check routed through a second
# client (different lease secrets) adds a second lease, and leases on
# unrelated files are never touched. Covers both immutable and mutable files.
# NOTE(review): some original lines are elided from this view (DATA/self.uris
# initialization, 'return d', the clientnum=1 argument on line 3122) --
# comments describe only the visible lines.
3043 def test_add_lease(self):
3044 self.basedir = "web/Grid/add_lease"
# two clients so we have two distinct sets of lease secrets
3045 self.set_up_grid(num_clients=2)
3046 c0 = self.g.clients[0]
3049 d = c0.upload(upload.Data(DATA, convergence=""))
3050 def _stash_uri(ur, which):
3051 self.uris[which] = ur.uri
3052 d.addCallback(_stash_uri, "one")
3053 d.addCallback(lambda ign:
3054 c0.upload(upload.Data(DATA+"1", convergence="")))
3055 d.addCallback(_stash_uri, "two")
# mutable files hand back a node, not an upload-results object
3056 def _stash_mutable_uri(n, which):
3057 self.uris[which] = n.get_uri()
3058 assert isinstance(self.uris[which], str)
3059 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3060 d.addCallback(_stash_mutable_uri, "mutable")
# build webapi URLs ("uri/<quoted-cap>") for every stashed URI
3062 def _compute_fileurls(ignored):
3064 for which in self.uris:
3065 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3066 d.addCallback(_compute_fileurls)
# baseline: each object starts with exactly one lease
3068 d.addCallback(self._count_leases, "one")
3069 d.addCallback(self._assert_leasecount, 1)
3070 d.addCallback(self._count_leases, "two")
3071 d.addCallback(self._assert_leasecount, 1)
3072 d.addCallback(self._count_leases, "mutable")
3073 d.addCallback(self._assert_leasecount, 1)
3075 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3076 def _got_html_good(res):
3077 self.failUnless("Healthy" in res, res)
3078 self.failIf("Not Healthy" in res, res)
3079 d.addCallback(_got_html_good)
# a plain check must not change any lease counts
3081 d.addCallback(self._count_leases, "one")
3082 d.addCallback(self._assert_leasecount, 1)
3083 d.addCallback(self._count_leases, "two")
3084 d.addCallback(self._assert_leasecount, 1)
3085 d.addCallback(self._count_leases, "mutable")
3086 d.addCallback(self._assert_leasecount, 1)
3088 # this CHECK uses the original client, which uses the same
3089 # lease-secrets, so it will just renew the original lease
3090 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3091 d.addCallback(_got_html_good)
3093 d.addCallback(self._count_leases, "one")
3094 d.addCallback(self._assert_leasecount, 1)
3095 d.addCallback(self._count_leases, "two")
3096 d.addCallback(self._assert_leasecount, 1)
3097 d.addCallback(self._count_leases, "mutable")
3098 d.addCallback(self._assert_leasecount, 1)
3100 # this CHECK uses an alternate client, which adds a second lease
3101 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3102 d.addCallback(_got_html_good)
# only "one" gains a lease; "two" and "mutable" are untouched
3104 d.addCallback(self._count_leases, "one")
3105 d.addCallback(self._assert_leasecount, 2)
3106 d.addCallback(self._count_leases, "two")
3107 d.addCallback(self._assert_leasecount, 1)
3108 d.addCallback(self._count_leases, "mutable")
3109 d.addCallback(self._assert_leasecount, 1)
# same renew-vs-add behavior for the mutable file: original client renews...
3111 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3112 d.addCallback(_got_html_good)
3114 d.addCallback(self._count_leases, "one")
3115 d.addCallback(self._assert_leasecount, 2)
3116 d.addCallback(self._count_leases, "two")
3117 d.addCallback(self._assert_leasecount, 1)
3118 d.addCallback(self._count_leases, "mutable")
3119 d.addCallback(self._assert_leasecount, 1)
# ...while the alternate client (elided clientnum argument on line 3122,
# presumably clientnum=1) adds a second lease to the mutable file
3121 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3123 d.addCallback(_got_html_good)
3125 d.addCallback(self._count_leases, "one")
3126 d.addCallback(self._assert_leasecount, 2)
3127 d.addCallback(self._count_leases, "two")
3128 d.addCallback(self._assert_leasecount, 1)
3129 d.addCallback(self._count_leases, "mutable")
3130 d.addCallback(self._assert_leasecount, 2)
3132 d.addErrback(self.explain_web_error)
# Exercise "?t=stream-deep-check&add-lease=true" over a small tree: a deep
# check by the original client renews existing leases (counts stay 1), and
# one routed through the second client (elided clientnum argument, line
# 3189) adds a second lease to every node in the tree.
# NOTE(review): some original lines are elided from this view (self.uris
# initialization, the _done callback's 'def' line, 'return d', closing
# parens) -- comments describe only the visible lines.
3135 def test_deep_add_lease(self):
3136 self.basedir = "web/Grid/deep_add_lease"
# two clients so we have two distinct sets of lease secrets
3137 self.set_up_grid(num_clients=2)
3138 c0 = self.g.clients[0]
3142 d = c0.create_empty_dirnode()
3143 def _stash_root_and_create_file(n):
3145 self.uris["root"] = n.get_uri()
3146 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3147 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3148 d.addCallback(_stash_root_and_create_file)
3149 def _stash_uri(fn, which):
3150 self.uris[which] = fn.get_uri()
3151 d.addCallback(_stash_uri, "one")
# "small" is short enough to become a literal (LIT) file (no leases)
3152 d.addCallback(lambda ign:
3153 self.rootnode.add_file(u"small",
3154 upload.Data("literal",
3156 d.addCallback(_stash_uri, "small")
3158 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3159 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3160 d.addCallback(_stash_uri, "mutable")
3162 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
3164 units = [simplejson.loads(line)
3165 for line in res.splitlines()
3167 # root, one, small, mutable, stats
3168 self.failUnlessEqual(len(units), 4+1)
3169 d.addCallback(_done)
# baseline: one lease everywhere
3171 d.addCallback(self._count_leases, "root")
3172 d.addCallback(self._assert_leasecount, 1)
3173 d.addCallback(self._count_leases, "one")
3174 d.addCallback(self._assert_leasecount, 1)
3175 d.addCallback(self._count_leases, "mutable")
3176 d.addCallback(self._assert_leasecount, 1)
# same client: add-lease only renews, so counts are unchanged
3178 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3179 d.addCallback(_done)
3181 d.addCallback(self._count_leases, "root")
3182 d.addCallback(self._assert_leasecount, 1)
3183 d.addCallback(self._count_leases, "one")
3184 d.addCallback(self._assert_leasecount, 1)
3185 d.addCallback(self._count_leases, "mutable")
3186 d.addCallback(self._assert_leasecount, 1)
# alternate client (elided argument on line 3189, presumably clientnum=1):
# every node in the tree gains a second lease
3188 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3190 d.addCallback(_done)
3192 d.addCallback(self._count_leases, "root")
3193 d.addCallback(self._assert_leasecount, 2)
3194 d.addCallback(self._count_leases, "one")
3195 d.addCallback(self._assert_leasecount, 2)
3196 d.addCallback(self._count_leases, "mutable")
3197 d.addCallback(self._assert_leasecount, 2)
3199 d.addErrback(self.explain_web_error)
# Verify the webapi's error reporting: NotEnoughSharesError (0-share and
# 1-share immutable files) renders as 410 Gone with a text/plain
# explanation; unrecoverable directories render as HTML pages (or 410 for
# ?t=json); missing children give 404; and an internal exception yields a
# 500 with an HTML traceback for Accept:*/* but a text/plain traceback when
# the client asks for text/plain.
# NOTE(review): some original lines are elided from this view (def lines of
# _stash_root/_stash_bad, URI mangling in the dirnode helpers, self.GET
# argument lines, 'return d') -- comments describe only the visible lines.
3203 def test_exceptions(self):
3204 self.basedir = "web/Grid/exceptions"
3205 self.set_up_grid(num_clients=1, num_servers=2)
3206 c0 = self.g.clients[0]
3209 d = c0.create_empty_dirnode()
3211 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
# "imaginary" names a child that was never created, for the 404 case
3212 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3214 d.addCallback(_stash_root)
3215 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# keep only share #0 of this file: recoverable count 1 < k=3
3217 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3218 self.delete_shares_numbered(ur.uri, range(1,10))
# flip a bit in the read key to fabricate a URI with zero findable shares
3220 u = uri.from_string(ur.uri)
3221 u.key = testutil.flip_bit(u.key, 0)
3222 baduri = u.to_string()
3223 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3224 d.addCallback(_stash_bad)
# a directory reduced to a single share (insufficient, since k=3)
3225 d.addCallback(lambda ign: c0.create_empty_dirnode())
3226 def _mangle_dirnode_1share(n):
3228 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3229 self.fileurls["dir-1share-json"] = url + "?t=json"
3230 self.delete_shares_numbered(u, range(1,10))
3231 d.addCallback(_mangle_dirnode_1share)
# a directory with all shares deleted
3232 d.addCallback(lambda ign: c0.create_empty_dirnode())
3233 def _mangle_dirnode_0share(n):
3235 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3236 self.fileurls["dir-0share-json"] = url + "?t=json"
3237 self.delete_shares_numbered(u, range(0,10))
3238 d.addCallback(_mangle_dirnode_0share)
3240 # NotEnoughSharesError should be reported sensibly, with a
3241 # text/plain explanation of the problem, and perhaps some
3242 # information on which shares *could* be found.
3244 d.addCallback(lambda ignored:
3245 self.shouldHTTPError("GET unrecoverable",
3246 410, "Gone", "NotEnoughSharesError",
3247 self.GET, self.fileurls["0shares"]))
# plain-text body (no HTML), with whitespace normalized before comparing
3248 def _check_zero_shares(body):
3249 self.failIf("<html>" in body, body)
3250 body = " ".join(body.strip().split())
3251 exp = ("NotEnoughSharesError: no shares could be found. "
3252 "Zero shares usually indicates a corrupt URI, or that "
3253 "no servers were connected, but it might also indicate "
3254 "severe corruption. You should perform a filecheck on "
3255 "this object to learn more.")
3256 self.failUnlessEqual(exp, body)
3257 d.addCallback(_check_zero_shares)
3260 d.addCallback(lambda ignored:
3261 self.shouldHTTPError("GET 1share",
3262 410, "Gone", "NotEnoughSharesError",
3263 self.GET, self.fileurls["1share"]))
# the 1-share message differs: it names how many were found vs needed (k=3)
3264 def _check_one_share(body):
3265 self.failIf("<html>" in body, body)
3266 body = " ".join(body.strip().split())
3267 exp = ("NotEnoughSharesError: 1 share found, but we need "
3268 "3 to recover the file. This indicates that some "
3269 "servers were unavailable, or that shares have been "
3270 "lost to server departure, hard drive failure, or disk "
3271 "corruption. You should perform a filecheck on "
3272 "this object to learn more.")
3273 self.failUnlessEqual(exp, body)
3274 d.addCallback(_check_one_share)
# nonexistent child: plain 404 naming the missing child
3276 d.addCallback(lambda ignored:
3277 self.shouldHTTPError("GET imaginary",
3278 404, "Not Found", None,
3279 self.GET, self.fileurls["imaginary"]))
3280 def _missing_child(body):
3281 self.failUnless("No such child: imaginary" in body, body)
3282 d.addCallback(_missing_child)
# an unrecoverable *directory* still renders its HTML page (200), minus
# the child table and upload forms
3284 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3285 def _check_0shares_dir_html(body):
3286 self.failUnless("<html>" in body, body)
3287 # we should see the regular page, but without the child table or
3289 body = " ".join(body.strip().split())
3290 self.failUnlessIn('href="?t=info">More info on this directory',
3292 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3293 "could not be retrieved, because there were insufficient "
3294 "good shares. This might indicate that no servers were "
3295 "connected, insufficient servers were connected, the URI "
3296 "was corrupt, or that shares have been lost due to server "
3297 "departure, hard drive failure, or disk corruption. You "
3298 "should perform a filecheck on this object to learn more.")
3299 self.failUnlessIn(exp, body)
3300 self.failUnlessIn("No upload forms: directory is unreadable", body)
3301 d.addCallback(_check_0shares_dir_html)
3303 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3304 def _check_1shares_dir_html(body):
3305 # at some point, we'll split UnrecoverableFileError into 0-shares
3306 # and some-shares like we did for immutable files (since there
3307 # are different sorts of advice to offer in each case). For now,
3308 # they present the same way.
3309 self.failUnless("<html>" in body, body)
3310 body = " ".join(body.strip().split())
3311 self.failUnlessIn('href="?t=info">More info on this directory',
3313 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3314 "could not be retrieved, because there were insufficient "
3315 "good shares. This might indicate that no servers were "
3316 "connected, insufficient servers were connected, the URI "
3317 "was corrupt, or that shares have been lost due to server "
3318 "departure, hard drive failure, or disk corruption. You "
3319 "should perform a filecheck on this object to learn more.")
3320 self.failUnlessIn(exp, body)
3321 self.failUnlessIn("No upload forms: directory is unreadable", body)
3322 d.addCallback(_check_1shares_dir_html)
# the ?t=json form of an unrecoverable directory is a 410, plain text
3324 d.addCallback(lambda ignored:
3325 self.shouldHTTPError("GET dir-0share-json",
3326 410, "Gone", "UnrecoverableFileError",
3328 self.fileurls["dir-0share-json"]))
3329 def _check_unrecoverable_file(body):
3330 self.failIf("<html>" in body, body)
3331 body = " ".join(body.strip().split())
3332 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3333 "could not be retrieved, because there were insufficient "
3334 "good shares. This might indicate that no servers were "
3335 "connected, insufficient servers were connected, the URI "
3336 "was corrupt, or that shares have been lost due to server "
3337 "departure, hard drive failure, or disk corruption. You "
3338 "should perform a filecheck on this object to learn more.")
3339 self.failUnlessEqual(exp, body)
3340 d.addCallback(_check_unrecoverable_file)
3342 d.addCallback(lambda ignored:
3343 self.shouldHTTPError("GET dir-1share-json",
3344 410, "Gone", "UnrecoverableFileError",
3346 self.fileurls["dir-1share-json"]))
3347 d.addCallback(_check_unrecoverable_file)
3349 d.addCallback(lambda ignored:
3350 self.shouldHTTPError("GET imaginary",
3351 404, "Not Found", None,
3352 self.GET, self.fileurls["imaginary"]))
3354 # attach a webapi child that throws a random error, to test how it
# (a deliberately-exploding resource; see ErrorBoom below)
3356 w = c0.getServiceNamed("webish")
3357 w.root.putChild("ERRORBOOM", ErrorBoom())
3359 d.addCallback(lambda ignored:
3360 self.shouldHTTPError("GET errorboom_html",
3361 500, "Internal Server Error", None,
3362 self.GET, "ERRORBOOM"))
3363 def _internal_error_html(body):
3364 # test that a weird exception during a webapi operation with
3365 # Accept:*/* results in a text/html stack trace, while one
3366 # without that Accept: line gets us a text/plain stack trace
3367 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3368 d.addCallback(_internal_error_html)
3370 d.addCallback(lambda ignored:
3371 self.shouldHTTPError("GET errorboom_text",
3372 500, "Internal Server Error", None,
3373 self.GET, "ERRORBOOM",
3374 headers={"accept": ["text/plain"]}))
3375 def _internal_error_text(body):
3376 # test that a weird exception during a webapi operation with
3377 # Accept:*/* results in a text/html stack trace, while one
3378 # without that Accept: line gets us a text/plain stack trace
3379 self.failIf("<html>" in body, body)
3380 self.failUnless(body.startswith("Traceback "), body)
3381 d.addCallback(_internal_error_text)
3383 def _flush_errors(res):
3384 # Trial: please ignore the CompletelyUnhandledError in the logs
3385 self.flushLoggedErrors(CompletelyUnhandledError)
# addBoth: flush logged errors on both success and failure paths
3387 d.addBoth(_flush_errors)
# Sentinel exception raised by ErrorBoom below; test_exceptions flushes it
# from trial's logs via flushLoggedErrors. (Body line 3392, presumably
# 'pass', is elided from this view.)
3391 class CompletelyUnhandledError(Exception):
# A nevow page that always raises before rendering, attached as the
# "ERRORBOOM" webapi child by test_exceptions to exercise the server's
# 500 Internal Server Error traceback rendering (HTML vs text/plain).
3393 class ErrorBoom(rend.Page):
3394 def beforeRender(self, ctx):
3395 raise CompletelyUnhandledError("whoops")