1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.immutable import upload, download
13 from allmydata.web import status, common
14 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
15 from allmydata.util import fileutil, base32
16 from allmydata.util.assertutil import precondition
17 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
18 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
19 from allmydata.interfaces import IURI, INewDirectoryURI, \
20 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
21 from allmydata.mutable import servermap, publish, retrieve
22 import common_util as testutil
23 from allmydata.test.no_network import GridTestMixin
25 from allmydata.test.common_web import HTTPClientGETFactory, \
28 # create a fake uploader/downloader, and a couple of fake dirnodes, then
29 # create a webserver that works against them
31 class FakeIntroducerClient:
32 def get_all_connectors(self):
34 def get_all_connections_for(self, service_name):
36 def get_all_peerids(self):
39 class FakeStatsProvider:
41 stats = {'stats': {}, 'counters': {}}
44 class FakeClient(service.MultiService):
45 nodeid = "fake_nodeid"
46 nickname = "fake_nickname"
47 basedir = "fake_basedir"
48 def get_versions(self):
49 return {'allmydata': "fake",
54 introducer_furl = "None"
55 introducer_client = FakeIntroducerClient()
56 _all_upload_status = [upload.UploadStatus()]
57 _all_download_status = [download.DownloadStatus()]
58 _all_mapupdate_statuses = [servermap.UpdateStatus()]
59 _all_publish_statuses = [publish.PublishStatus()]
60 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
61 convergence = "some random string"
62 stats_provider = FakeStatsProvider()
64 def connected_to_introducer(self):
67 def get_nickname_for_peerid(self, peerid):
70 def get_permuted_peers(self, service_name, key):
73 def create_node_from_uri(self, auri):
74 precondition(isinstance(auri, str), auri)
75 u = uri.from_string(auri)
76 if (INewDirectoryURI.providedBy(u)
77 or IReadonlyNewDirectoryURI.providedBy(u)):
78 return FakeDirectoryNode(self).init_from_uri(u)
79 if IFileURI.providedBy(u):
80 return FakeCHKFileNode(u, self)
81 assert IMutableFileURI.providedBy(u), u
82 return FakeMutableFileNode(self).init_from_uri(u)
84 def create_empty_dirnode(self):
85 n = FakeDirectoryNode(self)
87 d.addCallback(lambda res: n)
90 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
91 def create_mutable_file(self, contents=""):
92 n = FakeMutableFileNode(self)
93 return n.create(contents)
95 def upload(self, uploadable):
96 d = uploadable.get_size()
97 d.addCallback(lambda size: uploadable.read(size))
100 n = create_chk_filenode(self, data)
101 results = upload.UploadResults()
102 results.uri = n.get_uri()
104 d.addCallback(_got_data)
107 def list_all_upload_statuses(self):
108 return self._all_upload_status
109 def list_all_download_statuses(self):
110 return self._all_download_status
111 def list_all_mapupdate_statuses(self):
112 return self._all_mapupdate_statuses
113 def list_all_publish_statuses(self):
114 return self._all_publish_statuses
115 def list_all_retrieve_statuses(self):
116 return self._all_retrieve_statuses
117 def list_all_helper_statuses(self):
120 class WebMixin(object):
122 self.s = FakeClient()
123 self.s.startService()
124 self.staticdir = self.mktemp()
125 self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
126 s.setServiceParent(self.s)
127 self.webish_port = port = s.listener._port.getHost().port
128 self.webish_url = "http://localhost:%d" % port
130 l = [ self.s.create_empty_dirnode() for x in range(6) ]
131 d = defer.DeferredList(l)
133 self.public_root = res[0][1]
134 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
135 self.public_url = "/uri/" + self.public_root.get_uri()
136 self.private_root = res[1][1]
140 self._foo_uri = foo.get_uri()
141 self._foo_readonly_uri = foo.get_readonly_uri()
142 self._foo_verifycap = foo.get_verify_cap().to_string()
143 # NOTE: we ignore the deferred on all set_uri() calls, because we
144 # know the fake nodes do these synchronously
145 self.public_root.set_uri(u"foo", foo.get_uri())
147 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
148 foo.set_uri(u"bar.txt", self._bar_txt_uri)
149 self._bar_txt_verifycap = n.get_verify_cap().to_string()
151 foo.set_uri(u"empty", res[3][1].get_uri())
152 sub_uri = res[4][1].get_uri()
153 self._sub_uri = sub_uri
154 foo.set_uri(u"sub", sub_uri)
155 sub = self.s.create_node_from_uri(sub_uri)
157 _ign, n, blocking_uri = self.makefile(1)
158 foo.set_uri(u"blockingfile", blocking_uri)
160 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
161 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
162 # still think of it as an umlaut
163 foo.set_uri(unicode_filename, self._bar_txt_uri)
165 _ign, n, baz_file = self.makefile(2)
166 self._baz_file_uri = baz_file
167 sub.set_uri(u"baz.txt", baz_file)
169 _ign, n, self._bad_file_uri = self.makefile(3)
170 # this uri should not be downloadable
171 del FakeCHKFileNode.all_contents[self._bad_file_uri]
174 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
175 rodir.set_uri(u"nor", baz_file)
180 # public/foo/blockingfile
183 # public/foo/sub/baz.txt
185 # public/reedownlee/nor
186 self.NEWFILE_CONTENTS = "newfile contents\n"
188 return foo.get_metadata_for(u"bar.txt")
190 def _got_metadata(metadata):
191 self._bar_txt_metadata = metadata
192 d.addCallback(_got_metadata)
195 def makefile(self, number):
196 contents = "contents of file %s\n" % number
197 n = create_chk_filenode(self.s, contents)
198 return contents, n, n.get_uri()
201 return self.s.stopService()
203 def failUnlessIsBarDotTxt(self, res):
204 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
206 def failUnlessIsBarJSON(self, res):
207 data = simplejson.loads(res)
208 self.failUnless(isinstance(data, list))
209 self.failUnlessEqual(data[0], u"filenode")
210 self.failUnless(isinstance(data[1], dict))
211 self.failIf(data[1]["mutable"])
212 self.failIf("rw_uri" in data[1]) # immutable
213 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
214 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
215 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
217 def failUnlessIsFooJSON(self, res):
218 data = simplejson.loads(res)
219 self.failUnless(isinstance(data, list))
220 self.failUnlessEqual(data[0], "dirnode", res)
221 self.failUnless(isinstance(data[1], dict))
222 self.failUnless(data[1]["mutable"])
223 self.failUnless("rw_uri" in data[1]) # mutable
224 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
225 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
226 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
228 kidnames = sorted([unicode(n) for n in data[1]["children"]])
229 self.failUnlessEqual(kidnames,
230 [u"bar.txt", u"blockingfile", u"empty",
231 u"n\u00fc.txt", u"sub"])
232 kids = dict( [(unicode(name),value)
234 in data[1]["children"].iteritems()] )
235 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
236 self.failUnless("metadata" in kids[u"sub"][1])
237 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
238 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
239 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
240 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
241 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
242 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
243 self._bar_txt_verifycap)
244 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
245 self._bar_txt_metadata["ctime"])
246 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
249 def GET(self, urlpath, followRedirect=False, return_response=False,
251 # if return_response=True, this fires with (data, statuscode,
252 # respheaders) instead of just data.
253 assert not isinstance(urlpath, unicode)
254 url = self.webish_url + urlpath
255 factory = HTTPClientGETFactory(url, method="GET",
256 followRedirect=followRedirect, **kwargs)
257 reactor.connectTCP("localhost", self.webish_port, factory)
260 return (data, factory.status, factory.response_headers)
262 d.addCallback(_got_data)
263 return factory.deferred
265 def HEAD(self, urlpath, return_response=False, **kwargs):
266 # this requires some surgery, because twisted.web.client doesn't want
267 # to give us back the response headers.
268 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
269 reactor.connectTCP("localhost", self.webish_port, factory)
272 return (data, factory.status, factory.response_headers)
274 d.addCallback(_got_data)
275 return factory.deferred
277 def PUT(self, urlpath, data, **kwargs):
278 url = self.webish_url + urlpath
279 return client.getPage(url, method="PUT", postdata=data, **kwargs)
281 def DELETE(self, urlpath):
282 url = self.webish_url + urlpath
283 return client.getPage(url, method="DELETE")
285 def POST(self, urlpath, followRedirect=False, **fields):
286 url = self.webish_url + urlpath
287 sepbase = "boogabooga"
291 form.append('Content-Disposition: form-data; name="_charset"')
295 for name, value in fields.iteritems():
296 if isinstance(value, tuple):
297 filename, value = value
298 form.append('Content-Disposition: form-data; name="%s"; '
299 'filename="%s"' % (name, filename.encode("utf-8")))
301 form.append('Content-Disposition: form-data; name="%s"' % name)
303 if isinstance(value, unicode):
304 value = value.encode("utf-8")
307 assert isinstance(value, str)
311 body = "\r\n".join(form) + "\r\n"
312 headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
314 return client.getPage(url, method="POST", postdata=body,
315 headers=headers, followRedirect=followRedirect)
317 def shouldFail(self, res, expected_failure, which,
318 substring=None, response_substring=None):
319 if isinstance(res, failure.Failure):
320 res.trap(expected_failure)
322 self.failUnless(substring in str(res),
323 "substring '%s' not in '%s'"
324 % (substring, str(res)))
325 if response_substring:
326 self.failUnless(response_substring in res.value.response,
327 "response substring '%s' not in '%s'"
328 % (response_substring, res.value.response))
330 self.fail("%s was supposed to raise %s, not get '%s'" %
331 (which, expected_failure, res))
333 def shouldFail2(self, expected_failure, which, substring,
335 callable, *args, **kwargs):
336 assert substring is None or isinstance(substring, str)
337 assert response_substring is None or isinstance(response_substring, str)
338 d = defer.maybeDeferred(callable, *args, **kwargs)
340 if isinstance(res, failure.Failure):
341 res.trap(expected_failure)
343 self.failUnless(substring in str(res),
344 "%s: substring '%s' not in '%s'"
345 % (which, substring, str(res)))
346 if response_substring:
347 self.failUnless(response_substring in res.value.response,
348 "%s: response substring '%s' not in '%s'"
350 response_substring, res.value.response))
352 self.fail("%s was supposed to raise %s, not get '%s'" %
353 (which, expected_failure, res))
357 def should404(self, res, which):
358 if isinstance(res, failure.Failure):
359 res.trap(error.Error)
360 self.failUnlessEqual(res.value.status, "404")
362 self.fail("%s was supposed to Error(404), not get '%s'" %
366 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
367 def test_create(self):
370 def test_welcome(self):
373 self.failUnless('Welcome To AllMyData' in res)
374 self.failUnless('Tahoe' in res)
376 self.s.basedir = 'web/test_welcome'
377 fileutil.make_dirs("web/test_welcome")
378 fileutil.make_dirs("web/test_welcome/private")
380 d.addCallback(_check)
383 def test_provisioning(self):
384 d = self.GET("/provisioning/")
386 self.failUnless('Tahoe Provisioning Tool' in res)
387 fields = {'filled': True,
388 "num_users": int(50e3),
389 "files_per_user": 1000,
390 "space_per_user": int(1e9),
391 "sharing_ratio": 1.0,
392 "encoding_parameters": "3-of-10-5",
394 "ownership_mode": "A",
395 "download_rate": 100,
400 return self.POST("/provisioning/", **fields)
402 d.addCallback(_check)
404 self.failUnless('Tahoe Provisioning Tool' in res)
405 self.failUnless("Share space consumed: 167.01TB" in res)
407 fields = {'filled': True,
408 "num_users": int(50e6),
409 "files_per_user": 1000,
410 "space_per_user": int(5e9),
411 "sharing_ratio": 1.0,
412 "encoding_parameters": "25-of-100-50",
413 "num_servers": 30000,
414 "ownership_mode": "E",
415 "drive_failure_model": "U",
417 "download_rate": 1000,
422 return self.POST("/provisioning/", **fields)
423 d.addCallback(_check2)
425 self.failUnless("Share space consumed: huge!" in res)
426 fields = {'filled': True}
427 return self.POST("/provisioning/", **fields)
428 d.addCallback(_check3)
430 self.failUnless("Share space consumed:" in res)
431 d.addCallback(_check4)
434 def test_reliability_tool(self):
436 from allmydata import reliability
437 _hush_pyflakes = reliability
439 raise unittest.SkipTest("reliability tool requires NumPy")
441 d = self.GET("/reliability/")
443 self.failUnless('Tahoe Reliability Tool' in res)
444 fields = {'drive_lifetime': "8Y",
449 "check_period": "1M",
450 "report_period": "3M",
453 return self.POST("/reliability/", **fields)
455 d.addCallback(_check)
457 self.failUnless('Tahoe Reliability Tool' in res)
458 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
459 self.failUnless(re.search(r, res), res)
460 d.addCallback(_check2)
463 def test_status(self):
464 dl_num = self.s.list_all_download_statuses()[0].get_counter()
465 ul_num = self.s.list_all_upload_statuses()[0].get_counter()
466 mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
467 pub_num = self.s.list_all_publish_statuses()[0].get_counter()
468 ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
469 d = self.GET("/status", followRedirect=True)
471 self.failUnless('Upload and Download Status' in res, res)
472 self.failUnless('"down-%d"' % dl_num in res, res)
473 self.failUnless('"up-%d"' % ul_num in res, res)
474 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
475 self.failUnless('"publish-%d"' % pub_num in res, res)
476 self.failUnless('"retrieve-%d"' % ret_num in res, res)
477 d.addCallback(_check)
478 d.addCallback(lambda res: self.GET("/status/?t=json"))
479 def _check_json(res):
480 data = simplejson.loads(res)
481 self.failUnless(isinstance(data, dict))
482 active = data["active"]
483 # TODO: test more. We need a way to fake an active operation
485 d.addCallback(_check_json)
487 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
489 self.failUnless("File Download Status" in res, res)
490 d.addCallback(_check_dl)
491 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
493 self.failUnless("File Upload Status" in res, res)
494 d.addCallback(_check_ul)
495 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
496 def _check_mapupdate(res):
497 self.failUnless("Mutable File Servermap Update Status" in res, res)
498 d.addCallback(_check_mapupdate)
499 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
500 def _check_publish(res):
501 self.failUnless("Mutable File Publish Status" in res, res)
502 d.addCallback(_check_publish)
503 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
504 def _check_retrieve(res):
505 self.failUnless("Mutable File Retrieve Status" in res, res)
506 d.addCallback(_check_retrieve)
510 def test_status_numbers(self):
511 drrm = status.DownloadResultsRendererMixin()
512 self.failUnlessEqual(drrm.render_time(None, None), "")
513 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
514 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
515 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
516 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
517 self.failUnlessEqual(drrm.render_rate(None, None), "")
518 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
519 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
520 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
522 urrm = status.UploadResultsRendererMixin()
523 self.failUnlessEqual(urrm.render_time(None, None), "")
524 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
525 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
526 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
527 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
528 self.failUnlessEqual(urrm.render_rate(None, None), "")
529 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
530 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
531 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
533 def test_GET_FILEURL(self):
534 d = self.GET(self.public_url + "/foo/bar.txt")
535 d.addCallback(self.failUnlessIsBarDotTxt)
538 def test_GET_FILEURL_range(self):
539 headers = {"range": "bytes=1-10"}
540 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
541 return_response=True)
542 def _got((res, status, headers)):
543 self.failUnlessEqual(int(status), 206)
544 self.failUnless(headers.has_key("content-range"))
545 self.failUnlessEqual(headers["content-range"][0],
546 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
547 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
551 def test_GET_FILEURL_partial_range(self):
552 headers = {"range": "bytes=5-"}
553 length = len(self.BAR_CONTENTS)
554 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
555 return_response=True)
556 def _got((res, status, headers)):
557 self.failUnlessEqual(int(status), 206)
558 self.failUnless(headers.has_key("content-range"))
559 self.failUnlessEqual(headers["content-range"][0],
560 "bytes 5-%d/%d" % (length-1, length))
561 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
565 def test_HEAD_FILEURL_range(self):
566 headers = {"range": "bytes=1-10"}
567 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
568 return_response=True)
569 def _got((res, status, headers)):
570 self.failUnlessEqual(res, "")
571 self.failUnlessEqual(int(status), 206)
572 self.failUnless(headers.has_key("content-range"))
573 self.failUnlessEqual(headers["content-range"][0],
574 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
578 def test_HEAD_FILEURL_partial_range(self):
579 headers = {"range": "bytes=5-"}
580 length = len(self.BAR_CONTENTS)
581 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
582 return_response=True)
583 def _got((res, status, headers)):
584 self.failUnlessEqual(int(status), 206)
585 self.failUnless(headers.has_key("content-range"))
586 self.failUnlessEqual(headers["content-range"][0],
587 "bytes 5-%d/%d" % (length-1, length))
591 def test_GET_FILEURL_range_bad(self):
592 headers = {"range": "BOGUS=fizbop-quarnak"}
593 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
595 "Syntactically invalid http range header",
596 self.GET, self.public_url + "/foo/bar.txt",
600 def test_HEAD_FILEURL(self):
601 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
602 def _got((res, status, headers)):
603 self.failUnlessEqual(res, "")
604 self.failUnlessEqual(headers["content-length"][0],
605 str(len(self.BAR_CONTENTS)))
606 self.failUnlessEqual(headers["content-type"], ["text/plain"])
610 def test_GET_FILEURL_named(self):
611 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
612 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
613 d = self.GET(base + "/@@name=/blah.txt")
614 d.addCallback(self.failUnlessIsBarDotTxt)
615 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
616 d.addCallback(self.failUnlessIsBarDotTxt)
617 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
618 d.addCallback(self.failUnlessIsBarDotTxt)
619 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
620 d.addCallback(self.failUnlessIsBarDotTxt)
621 save_url = base + "?save=true&filename=blah.txt"
622 d.addCallback(lambda res: self.GET(save_url))
623 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
624 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
625 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
626 u_url = base + "?save=true&filename=" + u_fn_e
627 d.addCallback(lambda res: self.GET(u_url))
628 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
631 def test_PUT_FILEURL_named_bad(self):
632 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
633 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
635 "/file can only be used with GET or HEAD",
636 self.PUT, base + "/@@name=/blah.txt", "")
639 def test_GET_DIRURL_named_bad(self):
640 base = "/file/%s" % urllib.quote(self._foo_uri)
641 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
644 self.GET, base + "/@@name=/blah.txt")
647 def test_GET_slash_file_bad(self):
648 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
650 "/file must be followed by a file-cap and a name",
654 def test_GET_unhandled_URI_named(self):
655 contents, n, newuri = self.makefile(12)
656 verifier_cap = n.get_verify_cap().to_string()
657 base = "/file/%s" % urllib.quote(verifier_cap)
658 # client.create_node_from_uri() can't handle verify-caps
659 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
661 "is not a valid file- or directory- cap",
665 def test_GET_unhandled_URI(self):
666 contents, n, newuri = self.makefile(12)
667 verifier_cap = n.get_verify_cap().to_string()
668 base = "/uri/%s" % urllib.quote(verifier_cap)
669 # client.create_node_from_uri() can't handle verify-caps
670 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
672 "is not a valid file- or directory- cap",
676 def test_GET_FILE_URI(self):
677 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
679 d.addCallback(self.failUnlessIsBarDotTxt)
682 def test_GET_FILE_URI_badchild(self):
683 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
684 errmsg = "Files have no children, certainly not named 'boguschild'"
685 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
686 "400 Bad Request", errmsg,
690 def test_PUT_FILE_URI_badchild(self):
691 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
692 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
693 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
694 "400 Bad Request", errmsg,
698 def test_GET_FILEURL_save(self):
699 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
700 # TODO: look at the headers, expect a Content-Disposition: attachment
702 d.addCallback(self.failUnlessIsBarDotTxt)
705 def test_GET_FILEURL_missing(self):
706 d = self.GET(self.public_url + "/foo/missing")
707 d.addBoth(self.should404, "test_GET_FILEURL_missing")
710 def test_PUT_NEWFILEURL(self):
711 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
712 # TODO: we lose the response code, so we can't check this
713 #self.failUnlessEqual(responsecode, 201)
714 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
715 d.addCallback(lambda res:
716 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
717 self.NEWFILE_CONTENTS))
720 def test_PUT_NEWFILEURL_not_mutable(self):
721 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
722 self.NEWFILE_CONTENTS)
723 # TODO: we lose the response code, so we can't check this
724 #self.failUnlessEqual(responsecode, 201)
725 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
726 d.addCallback(lambda res:
727 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
728 self.NEWFILE_CONTENTS))
731 def test_PUT_NEWFILEURL_range_bad(self):
732 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
733 target = self.public_url + "/foo/new.txt"
734 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
735 "501 Not Implemented",
736 "Content-Range in PUT not yet supported",
737 # (and certainly not for immutable files)
738 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
740 d.addCallback(lambda res:
741 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
744 def test_PUT_NEWFILEURL_mutable(self):
745 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
746 self.NEWFILE_CONTENTS)
747 # TODO: we lose the response code, so we can't check this
748 #self.failUnlessEqual(responsecode, 201)
750 u = uri.from_string_mutable_filenode(res)
751 self.failUnless(u.is_mutable())
752 self.failIf(u.is_readonly())
754 d.addCallback(_check_uri)
755 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
756 d.addCallback(lambda res:
757 self.failUnlessMutableChildContentsAre(self._foo_node,
759 self.NEWFILE_CONTENTS))
762 def test_PUT_NEWFILEURL_mutable_toobig(self):
763 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
764 "413 Request Entity Too Large",
765 "SDMF is limited to one segment, and 10001 > 10000",
767 self.public_url + "/foo/new.txt?mutable=true",
768 "b" * (self.s.MUTABLE_SIZELIMIT+1))
771 def test_PUT_NEWFILEURL_replace(self):
772 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
773 # TODO: we lose the response code, so we can't check this
774 #self.failUnlessEqual(responsecode, 200)
775 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
776 d.addCallback(lambda res:
777 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
778 self.NEWFILE_CONTENTS))
781 def test_PUT_NEWFILEURL_bad_t(self):
782 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
783 "PUT to a file: bad t=bogus",
784 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
788 def test_PUT_NEWFILEURL_no_replace(self):
789 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
790 self.NEWFILE_CONTENTS)
791 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
793 "There was already a child by that name, and you asked me "
797 def test_PUT_NEWFILEURL_mkdirs(self):
798 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
800 d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
801 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
802 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
803 d.addCallback(lambda res:
804 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
805 self.NEWFILE_CONTENTS))
808 def test_PUT_NEWFILEURL_blocked(self):
809 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
810 self.NEWFILE_CONTENTS)
811 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
813 "Unable to create directory 'blockingfile': a file was in the way")
816 def test_DELETE_FILEURL(self):
817 d = self.DELETE(self.public_url + "/foo/bar.txt")
818 d.addCallback(lambda res:
819 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
822 def test_DELETE_FILEURL_missing(self):
823 d = self.DELETE(self.public_url + "/foo/missing")
824 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
827 def test_DELETE_FILEURL_missing2(self):
828 d = self.DELETE(self.public_url + "/missing/missing")
829 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
832 def test_GET_FILEURL_json(self):
833 # twisted.web.http.parse_qs ignores any query args without an '=', so
834 # I can't do "GET /path?json", I have to do "GET /path/t=json"
835 # instead. This may make it tricky to emulate the S3 interface
837 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
838 d.addCallback(self.failUnlessIsBarJSON)
841 def test_GET_FILEURL_json_missing(self):
842 d = self.GET(self.public_url + "/foo/missing?json")
843 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
846 def test_GET_FILEURL_uri(self):
847 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
849 self.failUnlessEqual(res, self._bar_txt_uri)
850 d.addCallback(_check)
851 d.addCallback(lambda res:
852 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
854 # for now, for files, uris and readonly-uris are the same
855 self.failUnlessEqual(res, self._bar_txt_uri)
856 d.addCallback(_check2)
859 def test_GET_FILEURL_badtype(self):
860 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
863 self.public_url + "/foo/bar.txt?t=bogus")
866 def test_GET_FILEURL_uri_missing(self):
867 d = self.GET(self.public_url + "/foo/missing?t=uri")
868 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
871 def test_GET_DIRURL(self):
872 # the addSlash means we get a redirect here
873 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
875 d = self.GET(self.public_url + "/foo", followRedirect=True)
877 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
879 # the FILE reference points to a URI, but it should end in bar.txt
880 bar_url = ("%s/file/%s/@@named=/bar.txt" %
881 (ROOT, urllib.quote(self._bar_txt_uri)))
882 get_bar = "".join([r'<td>FILE</td>',
884 r'<a href="%s">bar.txt</a>' % bar_url,
886 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
888 self.failUnless(re.search(get_bar, res), res)
889 for line in res.split("\n"):
890 # find the line that contains the delete button for bar.txt
891 if ("form action" in line and
892 'value="delete"' in line and
893 'value="bar.txt"' in line):
894 # the form target should use a relative URL
895 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
896 self.failUnless(('action="%s"' % foo_url) in line, line)
897 # and the when_done= should too
898 #done_url = urllib.quote(???)
899 #self.failUnless(('name="when_done" value="%s"' % done_url)
903 self.fail("unable to find delete-bar.txt line", res)
905 # the DIR reference just points to a URI
906 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
907 get_sub = ((r'<td>DIR</td>')
908 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
909 self.failUnless(re.search(get_sub, res), res)
910 d.addCallback(_check)
912 # look at a directory which is readonly
913 d.addCallback(lambda res:
914 self.GET(self.public_url + "/reedownlee", followRedirect=True))
916 self.failUnless("(read-only)" in res, res)
917 self.failIf("Upload a file" in res, res)
918 d.addCallback(_check2)
920 # and at a directory that contains a readonly directory
921 d.addCallback(lambda res:
922 self.GET(self.public_url, followRedirect=True))
924 self.failUnless(re.search('<td>DIR-RO</td>'
925 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
926 d.addCallback(_check3)
928 # and an empty directory
929 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
931 self.failUnless("directory is empty" in res, res)
932 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
933 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
934 d.addCallback(_check4)
938 def test_GET_DIRURL_badtype(self):
939 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
943 self.public_url + "/foo?t=bogus")
946 def test_GET_DIRURL_json(self):
947 d = self.GET(self.public_url + "/foo?t=json")
948 d.addCallback(self.failUnlessIsFooJSON)
952 def test_POST_DIRURL_manifest_no_ophandle(self):
953 d = self.shouldFail2(error.Error,
954 "test_POST_DIRURL_manifest_no_ophandle",
956 "slow operation requires ophandle=",
957 self.POST, self.public_url, t="start-manifest")
    def test_POST_DIRURL_manifest(self):
        # Start a manifest operation on /foo, poll it to completion, and
        # check the html/text/JSON renderings of the results.
        d = defer.succeed(None)
        def getman(ignored, output):
            # kick off (or re-fetch) the manifest operation under handle 125
            # NOTE(review): the closing arguments of this POST call are on a
            # line not shown here — confirm.
            d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
            d.addCallback(self.wait_for_operation, "125")
            d.addCallback(self.get_operation_results, "125", output)
        d.addCallback(getman, None)
        def _got_html(manifest):
            # default (html) rendering should name both children
            self.failUnless("Manifest of SI=" in manifest)
            self.failUnless("<td>sub</td>" in manifest)
            self.failUnless(self._sub_uri in manifest)
            self.failUnless("<td>sub/baz.txt</td>" in manifest)
        d.addCallback(_got_html)
        # both t=status and unadorned GET should be identical
        d.addCallback(lambda res: self.GET("/operations/125"))
        d.addCallback(_got_html)
        d.addCallback(getman, "html")
        d.addCallback(_got_html)
        d.addCallback(getman, "text")
        def _got_text(manifest):
            # text rendering is one "path cap" pair per line
            self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
            self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
        d.addCallback(_got_text)
        d.addCallback(getman, "JSON")
            # NOTE(review): the enclosing 'def _got_json(res):' and the
            # initialization of 'got' are on lines not shown here — confirm.
            data = res["manifest"]
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
            self.failUnless((u"sub",u"baz.txt") in got)
            self.failUnless("finished" in res)
            self.failUnless("origin" in res)
            self.failUnless("storage-index" in res)
            self.failUnless("verifycaps" in res)
            self.failUnless("stats" in res)
        d.addCallback(_got_json)
    def test_POST_DIRURL_deepsize_no_ophandle(self):
        # t=start-deep-size without ophandle= must be refused.
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepsize_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-size")
    def test_POST_DIRURL_deepsize(self):
        # Start a deep-size operation, wait for it, and check both the JSON
        # and text renderings of its results.
        d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "126")
        d.addCallback(self.get_operation_results, "126", "json")
        def _got_json(data):
            self.failUnlessEqual(data["finished"], True)
            # NOTE(review): the line binding 'size' from the JSON data is
            # not shown here — confirm.
            self.failUnless(size > 1000)
        d.addCallback(_got_json)
        d.addCallback(self.get_operation_results, "126", "text")
            # NOTE(review): the enclosing 'def _got_text(res):' line is not
            # shown here — confirm.
            mo = re.search(r'^size: (\d+)$', res, re.M)
            self.failUnless(mo, res)
            size = int(mo.group(1))
            # with directories, the size varies.
            self.failUnless(size > 1000)
        d.addCallback(_got_text)
    def test_POST_DIRURL_deepstats_no_ophandle(self):
        # t=start-deep-stats without ophandle= must be refused.
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepstats_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-stats")
    def test_POST_DIRURL_deepstats(self):
        # Run a deep-stats operation over /foo and compare the JSON results
        # against the known fixture contents (3 files, 3 directories, ...).
        d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "127")
        d.addCallback(self.get_operation_results, "127", "json")
        def _got_json(stats):
            expected = {"count-immutable-files": 3,
                        "count-mutable-files": 0,
                        "count-literal-files": 0,
                        "count-directories": 3,
                        "size-immutable-files": 57,
                        "size-literal-files": 0,
                        #"size-directories": 1912, # varies
                        #"largest-directory": 1590,
                        "largest-directory-children": 5,
                        "largest-immutable-file": 19,
            # NOTE(review): the dict's closing brace appears to be on a line
            # not shown here — confirm.
            for k,v in expected.iteritems():
                self.failUnlessEqual(stats[k], v,
                                     "stats[%s] was %s, not %s" %
            self.failUnlessEqual(stats["size-files-histogram"],
        d.addCallback(_got_json)
    def test_POST_DIRURL_stream_manifest(self):
        # t=stream-manifest returns newline-separated JSON units, ending in
        # a final "stats" unit.
        d = self.POST(self.public_url + "/foo/?t=stream-manifest")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessEqual(len(units), 7)
            self.failUnlessEqual(units[-1]["type"], "stats")
            # NOTE(review): 'first' is presumably units[0], bound on a line
            # not shown here — confirm.
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(first["cap"], self._foo_uri)
            self.failUnlessEqual(first["type"], "directory")
            baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
            self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
            self.failIfEqual(baz["storage-index"], None)
            self.failIfEqual(baz["verifycap"], None)
            self.failIfEqual(baz["repaircap"], None)
        d.addCallback(_check)
    def test_GET_DIRURL_uri(self):
        # GET ?t=uri on a directory returns its read-write cap.
        d = self.GET(self.public_url + "/foo?t=uri")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnlessEqual(res, self._foo_uri)
        d.addCallback(_check)
    def test_GET_DIRURL_readonly_uri(self):
        # GET ?t=readonly-uri on a directory returns its read-only cap.
        d = self.GET(self.public_url + "/foo?t=readonly-uri")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnlessEqual(res, self._foo_readonly_uri)
        d.addCallback(_check)
1100 def test_PUT_NEWDIRURL(self):
1101 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1102 d.addCallback(lambda res:
1103 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1104 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1105 d.addCallback(self.failUnlessNodeKeysAre, [])
1108 def test_PUT_NEWDIRURL_exists(self):
1109 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1110 d.addCallback(lambda res:
1111 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1112 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1113 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_blocked(self):
        # Trying to mkdir beneath an existing *file* must 409, and must not
        # disturb the existing 'sub' directory.
        # NOTE(review): the 'self.PUT,' callable argument appears to be on a
        # line not shown here — confirm.
        d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                             "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                             self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"sub"))
        d.addCallback(lambda res: self._foo_node.get(u"sub"))
        d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_mkdir_p(self):
        # t=mkdir-p&path=... should create intermediate directories and be
        # idempotent (repeating it yields the same URI).
        d = defer.succeed(None)
        d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
        d.addCallback(lambda res: self._foo_node.get(u"mkp"))
        def mkdir_p(mkpnode):
            url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
            # NOTE(review): the POST to 'url' that produces 'd' here is on a
            # line not shown — confirm.
            def made_subsub(ssuri):
                d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
                d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
                d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            d.addCallback(made_subsub)
        d.addCallback(mkdir_p)
1146 def test_PUT_NEWDIRURL_mkdirs(self):
1147 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1148 d.addCallback(lambda res:
1149 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1150 d.addCallback(lambda res:
1151 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1152 d.addCallback(lambda res:
1153 self._foo_node.get_child_at_path(u"subdir/newdir"))
1154 d.addCallback(self.failUnlessNodeKeysAre, [])
1157 def test_DELETE_DIRURL(self):
1158 d = self.DELETE(self.public_url + "/foo")
1159 d.addCallback(lambda res:
1160 self.failIfNodeHasChild(self.public_root, u"foo"))
1163 def test_DELETE_DIRURL_missing(self):
1164 d = self.DELETE(self.public_url + "/foo/missing")
1165 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1166 d.addCallback(lambda res:
1167 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1170 def test_DELETE_DIRURL_missing2(self):
1171 d = self.DELETE(self.public_url + "/missing")
1172 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    def dump_root(self):
        # Debugging helper: walk the public root and visit every child.
        w = webish.DirnodeWalkerMixin()
        def visitor(childpath, childnode, metadata):
        # NOTE(review): visitor's body (presumably a print) is on a line not
        # shown here — confirm.
        d = w.walk(self.public_root, visitor)
    def failUnlessNodeKeysAre(self, node, expected_keys):
        # Assert that 'node' has exactly the given (unicode) child names.
        for k in expected_keys:
            assert isinstance(k, unicode)
        # NOTE(review): the line binding 'd' (presumably node.list()) is not
        # shown here — confirm.
        def _check(children):
            self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
        d.addCallback(_check)
    def failUnlessNodeHasChild(self, node, name):
        # Assert that 'node' has a child with the given unicode name.
        assert isinstance(name, unicode)
        # NOTE(review): the line binding 'd' (presumably node.list()) is not
        # shown here — confirm.
        def _check(children):
            self.failUnless(name in children)
        d.addCallback(_check)
    def failIfNodeHasChild(self, node, name):
        # Assert that 'node' does NOT have a child with the given name.
        assert isinstance(name, unicode)
        # NOTE(review): the line binding 'd' (presumably node.list()) is not
        # shown here — confirm.
        def _check(children):
            self.failIf(name in children)
        d.addCallback(_check)
1206 def failUnlessChildContentsAre(self, node, name, expected_contents):
1207 assert isinstance(name, unicode)
1208 d = node.get_child_at_path(name)
1209 d.addCallback(lambda node: node.download_to_data())
1210 def _check(contents):
1211 self.failUnlessEqual(contents, expected_contents)
1212 d.addCallback(_check)
1215 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1216 assert isinstance(name, unicode)
1217 d = node.get_child_at_path(name)
1218 d.addCallback(lambda node: node.download_best_version())
1219 def _check(contents):
1220 self.failUnlessEqual(contents, expected_contents)
1221 d.addCallback(_check)
    def failUnlessChildURIIs(self, node, name, expected_uri):
        # Assert that the child at 'name' has the given URI.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            # NOTE(review): the enclosing 'def _check(child):' line is not
            # shown here — confirm.
            self.failUnlessEqual(child.get_uri(), expected_uri.strip())
        d.addCallback(_check)
    def failUnlessURIMatchesChild(self, got_uri, node, name):
        # Assert that a URI returned by the webapi matches the URI of the
        # child at 'name'.
        assert isinstance(name, unicode)
        d = node.get_child_at_path(name)
            # NOTE(review): the enclosing 'def _check(child):' line is not
            # shown here — confirm.
            self.failUnlessEqual(got_uri.strip(), child.get_uri())
        d.addCallback(_check)
1240 def failUnlessCHKURIHasContents(self, got_uri, contents):
1241 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
    def test_POST_upload(self):
        # POST t=upload with a file= part should add an immutable child.
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        # NOTE(review): 'fn' is bound on a line not shown here (presumably
        # fn = self._foo_node) — confirm.
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_unicode(self):
        # Uploading with a non-ASCII filename should store the child under
        # that name and make it retrievable via a UTF-8 encoded URL.
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=(filename, self.NEWFILE_CONTENTS))
        # NOTE(review): 'fn' is bound on a line not shown here (presumably
        # fn = self._foo_node) — confirm.
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        # NOTE(review): this call's closing argument/paren is on a line not
        # shown here — confirm.
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_unicode_named(self):
        # A name= field should override the filename from the file= part,
        # including non-ASCII names.
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        # NOTE(review): the name= argument of this POST is on a line not
        # shown here — confirm.
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("overridden", self.NEWFILE_CONTENTS))
        # NOTE(review): 'fn' is bound on a line not shown here — confirm.
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_no_link(self):
        # POST t=upload to /uri (no parent directory) returns an upload
        # results page containing the new file's URI.
        d = self.POST("/uri", t="upload",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check_upload_results(page):
            # this should be a page which describes the results of the upload
            # that just finished.
            self.failUnless("Upload Results:" in page)
            self.failUnless("URI:" in page)
            uri_re = re.compile("URI: <tt><span>(.*)</span>")
            mo = uri_re.search(page)
            self.failUnless(mo, page)
            new_uri = mo.group(1)
            # NOTE(review): the 'return new_uri' line appears to be missing
            # from this view — confirm.
        d.addCallback(_check_upload_results)
        d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1303 def test_POST_upload_no_link_whendone(self):
1304 d = self.POST("/uri", t="upload", when_done="/",
1305 file=("new.txt", self.NEWFILE_CONTENTS))
1306 d.addBoth(self.shouldRedirect, "/")
    def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
        # Invoke 'callable' and require that it redirects; pass the status
        # code and Location target to 'checker'.
        d = defer.maybeDeferred(callable, *args, **kwargs)
            # NOTE(review): the enclosing 'def _done(res):' (or similar)
            # line is not shown here — confirm.
            if isinstance(res, failure.Failure):
                res.trap(error.PageRedirect)
                statuscode = res.value.status
                target = res.value.location
                return checker(statuscode, target)
            self.fail("%s: callable was supposed to redirect, not return '%s'"
    def test_POST_upload_no_link_whendone_results(self):
        # when_done=/uri/%(uri)s should redirect to the newly-created file,
        # and fetching that target must return the uploaded contents.
        def check(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.startswith(self.webish_url), target)
            return client.getPage(target, method="GET")
        # NOTE(review): the 'check,' argument of shouldRedirect2 is on a
        # line not shown here — confirm.
        d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                                 self.POST, "/uri", t="upload",
                                 when_done="/uri/%(uri)s",
                                 file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    def test_POST_upload_no_link_mutable(self):
        # mutable=true on an unlinked upload creates a mutable file; the
        # returned cap must be retrievable via /uri/ and /file/.
        d = self.POST("/uri", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check(new_uri):
            new_uri = new_uri.strip()
            self.new_uri = new_uri
            # NOTE(review): 'u' is bound on a line not shown here
            # (presumably uri.from_string(new_uri)) — confirm.
            self.failUnless(IMutableFileURI.providedBy(u))
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(new_uri)
            return n.download_best_version()
        d.addCallback(_check)
            # NOTE(review): the enclosing 'def _check2(data):' line is not
            # shown here — confirm.
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/uri/%s" % urllib.quote(self.new_uri))
        d.addCallback(_check2)
            # NOTE(review): the enclosing 'def _check3(data):' line is not
            # shown here — confirm.
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/file/%s" % urllib.quote(self.new_uri))
        d.addCallback(_check3)
            # NOTE(review): the enclosing 'def _check4(data):' line is not
            # shown here — confirm.
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        d.addCallback(_check4)
    def test_POST_upload_no_link_mutable_toobig(self):
        # An unlinked mutable upload one byte over MUTABLE_SIZELIMIT must be
        # rejected with 413.
        # NOTE(review): the 'self.POST,' callable and the 'file=("new.txt",'
        # arguments are on lines not shown here — confirm.
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_no_link_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             "/uri", t="upload", mutable="true",
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def test_POST_upload_mutable(self):
        # End-to-end exercise of a linked mutable file: create it via POST,
        # overwrite via POST and PUT (URI must stay stable), then inspect
        # the HTML/JSON/t=uri renderings, HEAD, and size-limit errors.
        # this creates a mutable file
        d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        # NOTE(review): 'fn' is bound on a line not shown here (presumably
        # fn = self._foo_node) — confirm.
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            # NOTE(review): the enclosing 'def _got(newnode):' line is not
            # shown here — confirm.
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            # remember the node/URI so later checks can compare against it
            self._mutable_node = newnode
            self._mutable_uri = newnode.get_uri()
        # now upload it again and make sure that the URI doesn't change
        NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
        d.addCallback(lambda res:
                      self.POST(self.public_url + "/foo", t="upload",
                                file=("new.txt", NEWER_CONTENTS)))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            # NOTE(review): the enclosing 'def _got2(newnode):' line is not
            # shown here — confirm.
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got2)
        # upload a second time, using PUT instead of POST
        NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        # finally list the directory, since mutable files are displayed
        # slightly differently
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/",
                               followRedirect=True))
        def _check_page(res):
            # TODO: assert more about the contents
            self.failUnless("SSK" in res)
        d.addCallback(_check_page)
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            # NOTE(review): the enclosing 'def _got3(newnode):' line is not
            # shown here — confirm.
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got3)
        # look at the JSON form of the enclosing directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/?t=json",
                               followRedirect=True))
        def _check_page_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "dirnode")
            children = dict( [(unicode(name),value)
                              in parsed[1]["children"].iteritems()] )
            self.failUnless("new.txt" in children)
            new_json = children["new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
        d.addCallback(_check_page_json)
        # and the JSON form of the file
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=json"))
        def _check_file_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
        d.addCallback(_check_file_json)
        # and look at t=uri and t=readonly-uri
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=uri"))
        d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
        def _check_ro_uri(res):
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(res, ro_uri)
        d.addCallback(_check_ro_uri)
        # make sure we can get to it from /uri/URI
        d.addCallback(lambda res:
                      self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, NEW2_CONTENTS))
        # and that HEAD computes the size correctly
        d.addCallback(lambda res:
                      self.HEAD(self.public_url + "/foo/new.txt",
                                return_response=True))
        def _got_headers((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(NEW2_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got_headers)
        # make sure that size errors are displayed correctly for overwrite
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error,
                                       "test_POST_upload_mutable-toobig",
                                       "413 Request Entity Too Large",
                                       "SDMF is limited to one segment, and 10001 > 10000",
                                       self.public_url + "/foo", t="upload",
                                             "b" * (self.s.MUTABLE_SIZELIMIT+1)),
        d.addErrback(self.dump_error)
    def test_POST_upload_mutable_toobig(self):
        # A linked mutable upload over MUTABLE_SIZELIMIT must be rejected
        # with 413.
        # NOTE(review): the 'self.POST,' callable and the 'file=("new.txt",'
        # arguments are on lines not shown here — confirm.
        d = self.shouldFail2(error.Error,
                             "test_POST_upload_no_link_mutable_toobig",
                             "413 Request Entity Too Large",
                             "SDMF is limited to one segment, and 10001 > 10000",
                             self.public_url + "/foo",
                             t="upload", mutable="true",
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def dump_error(self, f):
        # if the web server returns an error code (like 400 Bad Request),
        # web.client.getPage puts the HTTP response body into the .response
        # attribute of the exception object that it gives back. It does not
        # appear in the Failure's repr(), so the ERROR that trial displays
        # will be rather terse and unhelpful. addErrback this method to the
        # end of your chain to get more information out of these errors.
        if f.check(error.Error):
            print "web.error.Error:"
            print f.value.response
    def test_POST_upload_replace(self):
        # Uploading over an existing child name replaces its contents by
        # default.
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        # NOTE(review): 'fn' is bound on a line not shown here (presumably
        # fn = self._foo_node) — confirm.
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                      self.NEWFILE_CONTENTS))
1547 def test_POST_upload_no_replace_ok(self):
1548 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1549 file=("new.txt", self.NEWFILE_CONTENTS))
1550 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1551 d.addCallback(lambda res: self.failUnlessEqual(res,
1552 self.NEWFILE_CONTENTS))
    def test_POST_upload_no_replace_queryarg(self):
        # ?replace=false on an existing name must fail and leave the old
        # contents in place.
        d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        # NOTE(review): the expected "409 Conflict" argument appears to be
        # on a line not shown here — confirm.
        d.addBoth(self.shouldFail, error.Error,
                  "POST_upload_no_replace_queryarg",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_no_replace_field(self):
        # replace=false supplied as a form field behaves the same as the
        # query argument: existing child is left untouched.
        d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                      file=("bar.txt", self.NEWFILE_CONTENTS))
        # NOTE(review): the expected "409 Conflict" argument appears to be
        # on a line not shown here — confirm.
        d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_POST_upload_whendone(self):
        # when_done= should redirect after the upload, and the child must
        # still have been created.
        d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addBoth(self.shouldRedirect, "/THERE")
        # NOTE(review): 'fn' is bound on a line not shown here (presumably
        # fn = self._foo_node) — confirm.
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named(self):
        # A name= field provides the child name when file= carries only the
        # raw contents.
        # NOTE(review): 'fn' is bound on a line not shown here (presumably
        # fn = self._foo_node) — confirm.
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="new.txt", file=self.NEWFILE_CONTENTS)
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, u"new.txt",
                                                      self.NEWFILE_CONTENTS))
    def test_POST_upload_named_badfilename(self):
        # name= containing a slash must be rejected, and the directory must
        # be left unchanged.
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_upload_named_badfilename",
                  "name= may not contain a slash",
        # make sure that nothing was added
        # NOTE(review): the remainder of the expected-children list is on
        # lines not shown here — confirm.
        d.addCallback(lambda res:
                      self.failUnlessNodeKeysAre(self._foo_node,
                                                 [u"bar.txt", u"blockingfile",
                                                  u"empty", u"n\u00fc.txt",
    def test_POST_FILEURL_check(self):
        # t=check on a healthy file: HTML report, when_done redirect,
        # return_to link, and JSON output.
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            # NOTE(review): the enclosing 'def _check3(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_FILEURL_check_and_repair(self):
        # t=check&repair=true on a healthy file still reports Healthy and
        # honors when_done/return_to.
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check", repair="true")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            # NOTE(review): the enclosing 'def _check3(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def test_POST_DIRURL_check(self):
        # t=check on a healthy directory: HTML, when_done redirect,
        # return_to link, and JSON output.
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            # NOTE(review): the enclosing 'def _check3(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res, res)
            self.failUnless("Return to file/directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_DIRURL_check_and_repair(self):
        # t=check&repair=true on a healthy directory still reports Healthy
        # and honors when_done/return_to.
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check", repair="true")
            # NOTE(review): the enclosing 'def _check(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            # NOTE(review): the enclosing 'def _check3(res):' line is not
            # shown here — confirm.
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to file/directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def wait_for_operation(self, ignored, ophandle):
        # Poll /operations/<ophandle>?t=status until the JSON status says
        # finished, stalling 1s between polls.
        url = "/operations/" + ophandle
        url += "?t=status&output=JSON"
            # NOTE(review): the GET of 'url' and the enclosing
            # 'def _got(res):' are on lines not shown here — confirm.
            data = simplejson.loads(res)
            if not data["finished"]:
                d = self.stall(delay=1.0)
                d.addCallback(self.wait_for_operation, ophandle)
    def get_operation_results(self, ignored, ophandle, output=None):
        # Fetch the results page for an operation handle, optionally in a
        # specific output= format; JSON output is parsed before returning.
        url = "/operations/" + ophandle
            # NOTE(review): the '?t=status' suffix and the 'if output:'
            # guard appear to be on lines not shown here — confirm.
            url += "&output=" + output
            if output and output.lower() == "json":
                return simplejson.loads(res)
    def test_POST_DIRURL_deepcheck_no_ophandle(self):
        # t=start-deep-check without ophandle= must be refused.
        d = self.shouldFail2(error.Error,
                             "test_POST_DIRURL_deepcheck_no_ophandle",
                             "slow operation requires ophandle=",
                             self.POST, self.public_url, t="start-deep-check")
    def test_POST_DIRURL_deepcheck(self):
        # Start a deep-check, follow the redirect to /operations/123, and
        # inspect JSON, HTML, and per-storage-index detail pages.
        def _check_redirect(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnless(target.endswith("/operations/123"))
        d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                                 self.POST, self.public_url,
                                 t="start-deep-check", ophandle="123")
        d.addCallback(self.wait_for_operation, "123")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy"], 8)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "123", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy: <span>8</span>" in res)
        d.addCallback(_check_html)
        d.addCallback(lambda res:
                      self.GET("/operations/123/"))
        d.addCallback(_check_html) # should be the same as without the slash
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error, "one", "404 Not Found",
                                       "No detailed results for SI bogus",
                                       self.GET, "/operations/123/bogus"))
        foo_si = self._foo_node.get_storage_index()
        foo_si_s = base32.b2a(foo_si)
        d.addCallback(lambda res:
                      self.GET("/operations/123/%s?output=JSON" % foo_si_s))
        def _check_foo_json(res):
            data = simplejson.loads(res)
            self.failUnlessEqual(data["storage-index"], foo_si_s)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_foo_json)
    def test_POST_DIRURL_deepcheck_and_repair(self):
        # Deep-check with repair on an already-healthy tree: all pre/post
        # repair counters should show 8 healthy objects and zero repairs.
        d = self.POST(self.public_url, t="start-deep-check", repair="true",
                      ophandle="124", output="json", followRedirect=True)
        d.addCallback(self.wait_for_operation, "124")
        def _check_json(data):
            self.failUnlessEqual(data["finished"], True)
            self.failUnlessEqual(data["count-objects-checked"], 8)
            self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
            self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
            self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
            self.failUnlessEqual(data["count-repairs-attempted"], 0)
            self.failUnlessEqual(data["count-repairs-successful"], 0)
            self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
            self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
            self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
            self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "124", "html")
        def _check_html(res):
            self.failUnless("Objects Checked: <span>8</span>" in res)
            self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
            self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
            self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
            self.failUnless("Repairs Attempted: <span>0</span>" in res)
            self.failUnless("Repairs Successful: <span>0</span>" in res)
            self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
            self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
            self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
            self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
        d.addCallback(_check_html)
    def test_POST_FILEURL_bad_t(self):
        # POST to a file URL with an unknown t= must 400.
        # NOTE(review): the trailing 't="bogus"' argument appears to be on
        # a line not shown here — confirm.
        d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                             "POST to file: bad t=bogus",
                             self.POST, self.public_url + "/foo/bar.txt",
1848 def test_POST_mkdir(self): # return value?
1849 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1850 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1851 d.addCallback(self.failUnlessNodeKeysAre, [])
1854 def test_POST_mkdir_2(self):
1855 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
1856 d.addCallback(lambda res:
1857 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1858 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1859 d.addCallback(self.failUnlessNodeKeysAre, [])
1862 def test_POST_mkdirs_2(self):
1863 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
1864 d.addCallback(lambda res:
1865 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
1866 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
1867 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
1868 d.addCallback(self.failUnlessNodeKeysAre, [])
1871 def test_POST_mkdir_no_parentdir_noredirect(self):
1872 d = self.POST("/uri?t=mkdir")
1873 def _after_mkdir(res):
1874 uri.NewDirectoryURI.init_from_string(res)
1875 d.addCallback(_after_mkdir)
1878 def test_POST_mkdir_no_parentdir_redirect(self):
1879 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1880 d.addBoth(self.shouldRedirect, None, statuscode='303')
1881 def _check_target(target):
1882 target = urllib.unquote(target)
1883 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1884 d.addCallback(_check_target)
1887 def test_POST_noparent_bad(self):
1888 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
1889 "/uri accepts only PUT, PUT?t=mkdir, "
1890 "POST?t=upload, and POST?t=mkdir",
1891 self.POST, "/uri?t=bogus")
    def test_welcome_page_mkdir_button(self):
        # Scrape the mkdir form off the welcome page, submit it, and expect
        # a 303 redirect.
        # Fetch the welcome page.
        # NOTE(review): the GET that binds 'd' is on a line not shown here
        # — confirm.
        def _after_get_welcome_page(res):
            MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
            mo = MKDIR_BUTTON_RE.search(res)
            formaction = mo.group(1)
            # NOTE(review): 'formt = mo.group(2)' appears to be on a line
            # not shown here — confirm.
            formaname = mo.group(3)
            formavalue = mo.group(4)
            return (formaction, formt, formaname, formavalue)
        d.addCallback(_after_get_welcome_page)
        def _after_parse_form(res):
            (formaction, formt, formaname, formavalue) = res
            return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
        d.addCallback(_after_parse_form)
        d.addBoth(self.shouldRedirect, None, statuscode='303')
1913 def test_POST_mkdir_replace(self): # return value?
1914 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1915 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1916 d.addCallback(self.failUnlessNodeKeysAre, [])
1919 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1920 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1921 d.addBoth(self.shouldFail, error.Error,
1922 "POST_mkdir_no_replace_queryarg",
1924 "There was already a child by that name, and you asked me "
1925 "to not replace it")
1926 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1927 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1930 def test_POST_mkdir_no_replace_field(self): # return value?
1931 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1933 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1935 "There was already a child by that name, and you asked me "
1936 "to not replace it")
1937 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1938 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1941 def test_POST_mkdir_whendone_field(self):
1942 d = self.POST(self.public_url + "/foo",
1943 t="mkdir", name="newdir", when_done="/THERE")
1944 d.addBoth(self.shouldRedirect, "/THERE")
1945 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1946 d.addCallback(self.failUnlessNodeKeysAre, [])
1949 def test_POST_mkdir_whendone_queryarg(self):
1950 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1951 t="mkdir", name="newdir")
1952 d.addBoth(self.shouldRedirect, "/THERE")
1953 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1954 d.addCallback(self.failUnlessNodeKeysAre, [])
1957 def test_POST_bad_t(self):
1958 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1959 "POST to a directory with bad t=BOGUS",
1960 self.POST, self.public_url + "/foo", t="BOGUS")
# --- POST ?t=set_children (atomic multi-add) and POST ?t=uri (attach an
# existing file by URI). The reqbody = """{ ... opening line, the JSON
# metadata lines, and the "def _then(res):" header are among the lines
# missing from this listing (gaps in the embedded numbering).
1963 def test_POST_set_children(self):
1964 contents9, n9, newuri9 = self.makefile(9)
1965 contents10, n10, newuri10 = self.makefile(10)
1966 contents11, n11, newuri11 = self.makefile(11)
1969 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1972 "ctime": 1002777696.7564139,
1973 "mtime": 1002777696.7564139
1976 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1979 "ctime": 1002777696.7564139,
1980 "mtime": 1002777696.7564139
1983 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1986 "ctime": 1002777696.7564139,
1987 "mtime": 1002777696.7564139
1990 }""" % (newuri9, newuri10, newuri11)
1992 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
1994 d = client.getPage(url, method="POST", postdata=reqbody)
1996 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
1997 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
1998 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
2000 d.addCallback(_then)
2001 d.addErrback(self.dump_error)
# attach a freshly-made file's URI as child "new.txt"
2004 def test_POST_put_uri(self):
2005 contents, n, newuri = self.makefile(8)
2006 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2007 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2008 d.addCallback(lambda res:
2009 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# t=uri over an existing child replaces it by default
2013 def test_POST_put_uri_replace(self):
2014 contents, n, newuri = self.makefile(8)
2015 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2016 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2017 d.addCallback(lambda res:
2018 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# replace=false (query arg) must refuse and leave bar.txt untouched
2022 def test_POST_put_uri_no_replace_queryarg(self):
2023 contents, n, newuri = self.makefile(8)
2024 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2025 name="bar.txt", uri=newuri)
2026 d.addBoth(self.shouldFail, error.Error,
2027 "POST_put_uri_no_replace_queryarg",
2029 "There was already a child by that name, and you asked me "
2030 "to not replace it")
2031 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2032 d.addCallback(self.failUnlessIsBarDotTxt)
# same, but replace=false passed as a form field
2035 def test_POST_put_uri_no_replace_field(self):
2036 contents, n, newuri = self.makefile(8)
2037 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2038 name="bar.txt", uri=newuri)
2039 d.addBoth(self.shouldFail, error.Error,
2040 "POST_put_uri_no_replace_field",
2042 "There was already a child by that name, and you asked me "
2043 "to not replace it")
2044 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2045 d.addCallback(self.failUnlessIsBarDotTxt)
# --- POST ?t=delete and ?t=rename tests, plus the failUnlessIsEmptyJSON
# assertion helper. Trailing "return d" lines are missing from this listing.
2048 def test_POST_delete(self):
2049 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2050 d.addCallback(lambda res: self._foo_node.list())
2051 def _check(children):
2052 self.failIf(u"bar.txt" in children)
2053 d.addCallback(_check)
# rename to a new name: old name gone, new name present, contents preserved
2056 def test_POST_rename_file(self):
2057 d = self.POST(self.public_url + "/foo", t="rename",
2058 from_name="bar.txt", to_name='wibble.txt')
2059 d.addCallback(lambda res:
2060 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2061 d.addCallback(lambda res:
2062 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2063 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2064 d.addCallback(self.failUnlessIsBarDotTxt)
2065 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2066 d.addCallback(self.failUnlessIsBarJSON)
# rename to the same name must be a harmless no-op
2069 def test_POST_rename_file_redundant(self):
2070 d = self.POST(self.public_url + "/foo", t="rename",
2071 from_name="bar.txt", to_name='bar.txt')
2072 d.addCallback(lambda res:
2073 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2074 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2075 d.addCallback(self.failUnlessIsBarDotTxt)
2076 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2077 d.addCallback(self.failUnlessIsBarJSON)
2080 def test_POST_rename_file_replace(self):
2081 # rename a file and replace a directory with it
2082 d = self.POST(self.public_url + "/foo", t="rename",
2083 from_name="bar.txt", to_name='empty')
2084 d.addCallback(lambda res:
2085 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2086 d.addCallback(lambda res:
2087 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2088 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2089 d.addCallback(self.failUnlessIsBarDotTxt)
2090 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2091 d.addCallback(self.failUnlessIsBarJSON)
2094 def test_POST_rename_file_no_replace_queryarg(self):
2095 # rename a file and replace a directory with it
2096 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2097 from_name="bar.txt", to_name='empty')
2098 d.addBoth(self.shouldFail, error.Error,
2099 "POST_rename_file_no_replace_queryarg",
2101 "There was already a child by that name, and you asked me "
2102 "to not replace it")
2103 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2104 d.addCallback(self.failUnlessIsEmptyJSON)
2107 def test_POST_rename_file_no_replace_field(self):
2108 # rename a file and replace a directory with it
2109 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2110 from_name="bar.txt", to_name='empty')
2111 d.addBoth(self.shouldFail, error.Error,
2112 "POST_rename_file_no_replace_field",
2114 "There was already a child by that name, and you asked me "
2115 "to not replace it")
2116 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2117 d.addCallback(self.failUnlessIsEmptyJSON)
# helper: res must be the t=json rendering of an empty dirnode
2120 def failUnlessIsEmptyJSON(self, res):
2121 data = simplejson.loads(res)
2122 self.failUnlessEqual(data[0], "dirnode", data)
2123 self.failUnlessEqual(len(data[1]["children"]), 0)
# a slash in to_name= must be rejected; the trailing args of the shouldFail
# call (original line 2132) are missing from this listing
2125 def test_POST_rename_file_slash_fail(self):
2126 d = self.POST(self.public_url + "/foo", t="rename",
2127 from_name="bar.txt", to_name='kirk/spock.txt')
2128 d.addBoth(self.shouldFail, error.Error,
2129 "test_POST_rename_file_slash_fail",
2131 "to_name= may not contain a slash",
2133 d.addCallback(lambda res:
2134 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# renaming a whole directory child of the public root
2137 def test_POST_rename_dir(self):
2138 d = self.POST(self.public_url, t="rename",
2139 from_name="foo", to_name='plunk')
2140 d.addCallback(lambda res:
2141 self.failIfNodeHasChild(self.public_root, u"foo"))
2142 d.addCallback(lambda res:
2143 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2144 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2145 d.addCallback(self.failUnlessIsFooJSON)
# --- shouldRedirect assertion helper plus the GET /uri form/URL tests.
# shouldRedirect is designed to sit on addBoth: a success result means we
# were NOT redirected (fail); a PageRedirect failure is the expected path.
2148 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2149 """ If target is not None then the redirection has to go to target. If
2150 statuscode is not None then the redirection has to be accomplished with
2151 that HTTP status code."""
2152 if not isinstance(res, failure.Failure):
2153 to_where = (target is None) and "somewhere" or ("to " + target)
2154 self.fail("%s: we were expecting to get redirected %s, not get an"
2155 " actual page: %s" % (which, to_where, res))
2156 res.trap(error.PageRedirect)
2157 if statuscode is not None:
2158 self.failUnlessEqual(res.value.status, statuscode,
2159 "%s: not a redirect" % which)
2160 if target is not None:
2161 # the PageRedirect does not seem to capture the uri= query arg
2162 # properly, so we can't check for it.
2163 realtarget = self.webish_url + target
2164 self.failUnlessEqual(res.value.location, realtarget,
2165 "%s: wrong target" % which)
2166 return res.value.location
# GET /uri?uri=... should 302/303 to /uri/$URI, carrying extra query args;
# the initial "d = self.GET(base)" (original line 2172) is missing here
2168 def test_GET_URI_form(self):
2169 base = "/uri?uri=%s" % self._bar_txt_uri
2170 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2171 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2173 d.addBoth(self.shouldRedirect, targetbase)
2174 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2175 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2176 d.addCallback(lambda res: self.GET(base+"&t=json"))
2177 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2178 d.addCallback(self.log, "about to get file by uri")
2179 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2180 d.addCallback(self.failUnlessIsBarDotTxt)
2181 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2182 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2183 followRedirect=True))
2184 d.addCallback(self.failUnlessIsFooJSON)
2185 d.addCallback(self.log, "got dir by uri")
# GET /uri without uri= must be a 400; the callable args of shouldFail2
# (original lines 2192-2193) are missing from this listing
2189 def test_GET_URI_form_bad(self):
2190 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2191 "400 Bad Request", "GET /uri requires uri=",
# the rename-form page should pre-populate from_name and when_done
2195 def test_GET_rename_form(self):
2196 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2197 followRedirect=True)
2199 self.failUnless('name="when_done" value="."' in res, res)
2200 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2201 d.addCallback(_check)
# passthrough logging callback (debug print left commented out)
2204 def log(self, res, msg):
2205 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/$FILEURI with optional filename=/save= args; the initial
# "d = self.GET(base)" (original line 2211) is missing here
2209 def test_GET_URI_URL(self):
2210 base = "/uri/%s" % self._bar_txt_uri
2212 d.addCallback(self.failUnlessIsBarDotTxt)
2213 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2214 d.addCallback(self.failUnlessIsBarDotTxt)
2215 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2216 d.addCallback(self.failUnlessIsBarDotTxt)
2219 def test_GET_URI_URL_dir(self):
2220 base = "/uri/%s?t=json" % self._foo_uri
2222 d.addCallback(self.failUnlessIsFooJSON)
# a URI whose shares are unavailable should yield HTTP 410 GONE
2225 def test_GET_URI_URL_missing(self):
2226 base = "/uri/%s" % self._bad_file_uri
2227 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2228 http.GONE, None, "NotEnoughSharesError",
2230 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2231 # here? we must arrange for a download to fail after target.open()
2232 # has been called, and then inspect the response to see that it is
2233 # shorter than we expected.
# --- PUT ?t=uri tests: replacing a directory or file child by URI, with
# replace=false refusal and bad-t rejection. Several closing-paren /
# "return d" lines are missing from this listing.
2236 def test_PUT_DIRURL_uri(self):
2237 d = self.s.create_empty_dirnode()
# note: the inner "def _made_dir(dn):" header (original line 2238) is
# missing; "d" is rebound inside it per the callback chain below
2239 new_uri = dn.get_uri()
2240 # replace /foo with a new (empty) directory
2241 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2242 d.addCallback(lambda res:
2243 self.failUnlessEqual(res.strip(), new_uri))
2244 d.addCallback(lambda res:
2245 self.failUnlessChildURIIs(self.public_root,
2249 d.addCallback(_made_dir)
2252 def test_PUT_DIRURL_uri_noreplace(self):
2253 d = self.s.create_empty_dirnode()
2255 new_uri = dn.get_uri()
2256 # replace /foo with a new (empty) directory, but ask that
2257 # replace=false, so it should fail
2258 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2259 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2261 self.public_url + "/foo?t=uri&replace=false",
2263 d.addCallback(lambda res:
2264 self.failUnlessChildURIIs(self.public_root,
2268 d.addCallback(_made_dir)
# PUT with an unknown t= on a directory must 400, and /foo must survive
2271 def test_PUT_DIRURL_bad_t(self):
2272 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2273 "400 Bad Request", "PUT to a directory",
2274 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2275 d.addCallback(lambda res:
2276 self.failUnlessChildURIIs(self.public_root,
# PUT a file URI as a new child; response body echoes the URI
2281 def test_PUT_NEWFILEURL_uri(self):
2282 contents, n, new_uri = self.makefile(8)
2283 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2284 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2285 d.addCallback(lambda res:
2286 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2290 def test_PUT_NEWFILEURL_uri_replace(self):
2291 contents, n, new_uri = self.makefile(8)
2292 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2293 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2294 d.addCallback(lambda res:
2295 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2299 def test_PUT_NEWFILEURL_uri_no_replace(self):
2300 contents, n, new_uri = self.makefile(8)
2301 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2302 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2304 "There was already a child by that name, and you asked me "
2305 "to not replace it")
# --- PUT /uri file-creation tests (immutable, mutable=false, bad t=,
# mutable=true), PUT /uri?t=mkdir, and POST ?t=check. Inner "def _check"/
# "def _check2" headers and "return d" lines are missing from this listing.
2308 def test_PUT_NEWFILE_URI(self):
2309 file_contents = "New file contents here\n"
2310 d = self.PUT("/uri", file_contents)
2312 assert isinstance(uri, str), uri
2313 self.failUnless(uri in FakeCHKFileNode.all_contents)
2314 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2316 return self.GET("/uri/%s" % uri)
2317 d.addCallback(_check)
2319 self.failUnlessEqual(res, file_contents)
2320 d.addCallback(_check2)
# mutable=false must behave the same as the default immutable upload
2323 def test_PUT_NEWFILE_URI_not_mutable(self):
2324 file_contents = "New file contents here\n"
2325 d = self.PUT("/uri?mutable=false", file_contents)
2327 assert isinstance(uri, str), uri
2328 self.failUnless(uri in FakeCHKFileNode.all_contents)
2329 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2331 return self.GET("/uri/%s" % uri)
2332 d.addCallback(_check)
2334 self.failUnlessEqual(res, file_contents)
2335 d.addCallback(_check2)
# unknown t= on PUT /uri must fail with the canonical message
2338 def test_PUT_NEWFILE_URI_only_PUT(self):
2339 d = self.PUT("/uri?t=bogus", "")
2340 d.addBoth(self.shouldFail, error.Error,
2341 "PUT_NEWFILE_URI_only_PUT",
2343 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# mutable=true should create a mutable file readable via its node; the
# "u = IURI(...)" line (original ~2350-2351) is missing from this listing
2346 def test_PUT_NEWFILE_URI_mutable(self):
2347 file_contents = "New file contents here\n"
2348 d = self.PUT("/uri?mutable=true", file_contents)
2349 def _check_mutable(uri):
2352 self.failUnless(IMutableFileURI.providedBy(u))
2353 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2354 n = self.s.create_node_from_uri(uri)
2355 return n.download_best_version()
2356 d.addCallback(_check_mutable)
2357 def _check2_mutable(data):
2358 self.failUnlessEqual(data, file_contents)
2359 d.addCallback(_check2_mutable)
# an immutable-flavored _check/_check2 pair follows; its "def" headers
# (original ~2361-2362) are missing from this listing
2363 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2364 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2366 return self.GET("/uri/%s" % uri)
2367 d.addCallback(_check)
2369 self.failUnlessEqual(res, file_contents)
2370 d.addCallback(_check2)
# PUT /uri?t=mkdir returns the new dir's URI; verify it is empty via t=json
2373 def test_PUT_mkdir(self):
2374 d = self.PUT("/uri?t=mkdir", "")
2376 n = self.s.create_node_from_uri(uri.strip())
2377 d2 = self.failUnlessNodeKeysAre(n, [])
2378 d2.addCallback(lambda res:
2379 self.GET("/uri/%s?t=json" % uri))
2381 d.addCallback(_check)
2382 d.addCallback(self.failUnlessIsEmptyJSON)
2385 def test_POST_check(self):
2386 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2388 # this returns a string form of the results, which are probably
2389 # None since we're using fake filenodes.
2390 # TODO: verify that the check actually happened, by changing
2391 # FakeCHKFileNode to count how many times .check() has been
2394 d.addCallback(_done)
# --- HTTP-method rejection tests, operation-handle (ophandle) lifecycle
# tests, the incident-report form, and the /static file tree. Inner
# "def _check..." headers and "return d" lines are missing from this listing.
2397 def test_bad_method(self):
2398 url = self.webish_url + self.public_url + "/foo/bar.txt"
2399 d = self.shouldHTTPError("test_bad_method",
2400 501, "Not Implemented",
2401 "I don't know how to treat a BOGUS request.",
2402 client.getPage, url, method="BOGUS")
2405 def test_short_url(self):
2406 url = self.webish_url + "/uri"
2407 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2408 "I don't know how to treat a DELETE request.",
2409 client.getPage, url, method="DELETE")
# asking for status of an ophandle that was never created must 404
2412 def test_ophandle_bad(self):
2413 url = self.webish_url + "/operations/bogus?t=status"
2414 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2415 "unknown/expired handle 'bogus'",
2416 client.getPage, url)
# t=cancel should cancel the monitor and forget the handle
2419 def test_ophandle_cancel(self):
2420 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2421 followRedirect=True)
2422 d.addCallback(lambda ignored:
2423 self.GET("/operations/128?t=status&output=JSON"))
2425 data = simplejson.loads(res)
2426 self.failUnless("finished" in data, res)
2427 monitor = self.ws.root.child_operations.handles["128"][0]
2428 d = self.POST("/operations/128?t=cancel&output=JSON")
2430 data = simplejson.loads(res)
2431 self.failUnless("finished" in data, res)
2432 # t=cancel causes the handle to be forgotten
2433 self.failUnless(monitor.is_cancelled())
2434 d.addCallback(_check2)
2436 d.addCallback(_check1)
2437 d.addCallback(lambda ignored:
2438 self.shouldHTTPError("test_ophandle_cancel",
2439 404, "404 Not Found",
2440 "unknown/expired handle '128'",
2442 "/operations/128?t=status&output=JSON"))
# retain-for=0 should expire the handle almost immediately
2445 def test_ophandle_retainfor(self):
2446 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2447 followRedirect=True)
2448 d.addCallback(lambda ignored:
2449 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2451 data = simplejson.loads(res)
2452 self.failUnless("finished" in data, res)
2453 d.addCallback(_check1)
2454 # the retain-for=0 will cause the handle to be expired very soon
2455 d.addCallback(self.stall, 2.0)
2456 d.addCallback(lambda ignored:
2457 self.shouldHTTPError("test_ophandle_retainfor",
2458 404, "404 Not Found",
2459 "unknown/expired handle '129'",
2461 "/operations/129?t=status&output=JSON"))
# release-after-complete=true should drop the handle once it is read
2464 def test_ophandle_release_after_complete(self):
2465 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2466 followRedirect=True)
2467 d.addCallback(self.wait_for_operation, "130")
2468 d.addCallback(lambda ignored:
2469 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2470 # the release-after-complete=true will cause the handle to be expired
2471 d.addCallback(lambda ignored:
2472 self.shouldHTTPError("test_ophandle_release_after_complete",
2473 404, "404 Not Found",
2474 "unknown/expired handle '130'",
2476 "/operations/130?t=status&output=JSON"))
2479 def test_incident(self):
2480 d = self.POST("/report_incident", details="eek")
2482 self.failUnless("Thank you for your report!" in res, res)
2483 d.addCallback(_done)
# files under staticdir must be served at /static/...; the f.write/f.close
# lines (original ~2490-2491) are missing from this listing
2486 def test_static(self):
2487 webdir = os.path.join(self.staticdir, "subdir")
2488 fileutil.make_dirs(webdir)
2489 f = open(os.path.join(webdir, "hello.txt"), "wb")
2493 d = self.GET("/static/subdir/hello.txt")
2495 self.failUnlessEqual(res, "hello")
2496 d.addCallback(_check)
# Unit tests for the small formatting helpers in allmydata.web.common and
# allmydata.web.status (time/rate/size abbreviation and pluralization).
2500 class Util(unittest.TestCase):
2501 def test_abbreviate_time(self):
2502 self.failUnlessEqual(common.abbreviate_time(None), "")
2503 self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
2504 self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
2505 self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
2506 self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
2508 def test_abbreviate_rate(self):
2509 self.failUnlessEqual(common.abbreviate_rate(None), "")
2510 self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
2511 self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
2512 self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
# note: sizes abbreviate with decimal (1000-based) units, not binary
2514 def test_abbreviate_size(self):
2515 self.failUnlessEqual(common.abbreviate_size(None), "")
2516 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
2517 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
2518 self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
2519 self.failUnlessEqual(common.abbreviate_size(123), "123B")
# the inner "def convert(s):" / "def convert2(s):" headers (original lines
# 2522 and 2527) are missing from this listing
2521 def test_plural(self):
2523 return "%d second%s" % (s, status.plural(s))
2524 self.failUnlessEqual(convert(0), "0 seconds")
2525 self.failUnlessEqual(convert(1), "1 second")
2526 self.failUnlessEqual(convert(2), "2 seconds")
2528 return "has share%s: %s" % (status.plural(s), ",".join(s))
2529 self.failUnlessEqual(convert2([]), "has shares: ")
2530 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2531 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
# Grid-backed web tests (real shares on disk, no fake nodes). CHECK is a
# small helper: POST <fileurl>?<args> for the stashed file named `which`,
# optionally through a different client's webport.
2534 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
2536 def CHECK(self, ign, which, args, clientnum=0):
2537 fileurl = self.fileurls[which]
2538 url = fileurl + "?" + args
2539 return self.GET(url, method="POST", clientnum=clientnum)
# t=check over five flavors of file: healthy ("good"), one-share-missing
# ("sick"), mostly-deleted ("dead"), corrupted-mutable ("corrupt"), and a
# literal ("small"); HTML and JSON outputs are both checked. The set_up_grid
# call, self.uris={} initialization, corrupt_share(cso) invocation and
# "return d" are among the lines missing from this listing.
2541 def test_filecheck(self):
2542 self.basedir = "web/Grid/filecheck"
2544 c0 = self.g.clients[0]
2547 d = c0.upload(upload.Data(DATA, convergence=""))
2548 def _stash_uri(ur, which):
2549 self.uris[which] = ur.uri
2550 d.addCallback(_stash_uri, "good")
2551 d.addCallback(lambda ign:
2552 c0.upload(upload.Data(DATA+"1", convergence="")))
2553 d.addCallback(_stash_uri, "sick")
2554 d.addCallback(lambda ign:
2555 c0.upload(upload.Data(DATA+"2", convergence="")))
2556 d.addCallback(_stash_uri, "dead")
2557 def _stash_mutable_uri(n, which):
2558 self.uris[which] = n.get_uri()
2559 assert isinstance(self.uris[which], str)
2560 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2561 d.addCallback(_stash_mutable_uri, "corrupt")
2562 d.addCallback(lambda ign:
2563 c0.upload(upload.Data("literal", convergence="")))
2564 d.addCallback(_stash_uri, "small")
2566 def _compute_fileurls(ignored):
2568 for which in self.uris:
2569 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2570 d.addCallback(_compute_fileurls)
# damage the shares: delete one of "sick", delete 9/10 of "dead",
# and run the corrupt-share debug tool on one share of "corrupt"
2572 def _clobber_shares(ignored):
2573 good_shares = self.find_shares(self.uris["good"])
2574 self.failUnlessEqual(len(good_shares), 10)
2575 sick_shares = self.find_shares(self.uris["sick"])
2576 os.unlink(sick_shares[0][2])
2577 dead_shares = self.find_shares(self.uris["dead"])
2578 for i in range(1, 10):
2579 os.unlink(dead_shares[i][2])
2580 c_shares = self.find_shares(self.uris["corrupt"])
2581 cso = CorruptShareOptions()
2582 cso.stdout = StringIO()
2583 cso.parseOptions([c_shares[0][2]])
2585 d.addCallback(_clobber_shares)
2587 d.addCallback(self.CHECK, "good", "t=check")
2588 def _got_html_good(res):
2589 self.failUnless("Healthy" in res, res)
2590 self.failIf("Not Healthy" in res, res)
2591 d.addCallback(_got_html_good)
2592 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2593 def _got_html_good_return_to(res):
2594 self.failUnless("Healthy" in res, res)
2595 self.failIf("Not Healthy" in res, res)
2596 self.failUnless('<a href="somewhere">Return to file'
2598 d.addCallback(_got_html_good_return_to)
2599 d.addCallback(self.CHECK, "good", "t=check&output=json")
2600 def _got_json_good(res):
2601 r = simplejson.loads(res)
2602 self.failUnlessEqual(r["summary"], "Healthy")
2603 self.failUnless(r["results"]["healthy"])
2604 self.failIf(r["results"]["needs-rebalancing"])
2605 self.failUnless(r["results"]["recoverable"])
2606 d.addCallback(_got_json_good)
# literal files carry their data in the cap itself, so always healthy
2608 d.addCallback(self.CHECK, "small", "t=check")
2609 def _got_html_small(res):
2610 self.failUnless("Literal files are always healthy" in res, res)
2611 self.failIf("Not Healthy" in res, res)
2612 d.addCallback(_got_html_small)
2613 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2614 def _got_html_small_return_to(res):
2615 self.failUnless("Literal files are always healthy" in res, res)
2616 self.failIf("Not Healthy" in res, res)
2617 self.failUnless('<a href="somewhere">Return to file'
2619 d.addCallback(_got_html_small_return_to)
2620 d.addCallback(self.CHECK, "small", "t=check&output=json")
2621 def _got_json_small(res):
2622 r = simplejson.loads(res)
2623 self.failUnlessEqual(r["storage-index"], "")
2624 self.failUnless(r["results"]["healthy"])
2625 d.addCallback(_got_json_small)
2627 d.addCallback(self.CHECK, "sick", "t=check")
2628 def _got_html_sick(res):
2629 self.failUnless("Not Healthy" in res, res)
2630 d.addCallback(_got_html_sick)
2631 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2632 def _got_json_sick(res):
2633 r = simplejson.loads(res)
2634 self.failUnlessEqual(r["summary"],
2635 "Not Healthy: 9 shares (enc 3-of-10)")
2636 self.failIf(r["results"]["healthy"])
2637 self.failIf(r["results"]["needs-rebalancing"])
2638 self.failUnless(r["results"]["recoverable"])
2639 d.addCallback(_got_json_sick)
# "dead" has only 1 of 10 shares left: unhealthy AND unrecoverable
2641 d.addCallback(self.CHECK, "dead", "t=check")
2642 def _got_html_dead(res):
2643 self.failUnless("Not Healthy" in res, res)
2644 d.addCallback(_got_html_dead)
2645 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2646 def _got_json_dead(res):
2647 r = simplejson.loads(res)
2648 self.failUnlessEqual(r["summary"],
2649 "Not Healthy: 1 shares (enc 3-of-10)")
2650 self.failIf(r["results"]["healthy"])
2651 self.failIf(r["results"]["needs-rebalancing"])
2652 self.failIf(r["results"]["recoverable"])
2653 d.addCallback(_got_json_dead)
# corruption is only detected with verify=true
2655 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2656 def _got_html_corrupt(res):
2657 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2658 d.addCallback(_got_html_corrupt)
2659 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2660 def _got_json_corrupt(res):
2661 r = simplejson.loads(res)
2662 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2664 self.failIf(r["results"]["healthy"])
2665 self.failUnless(r["results"]["recoverable"])
2666 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2667 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2668 d.addCallback(_got_json_corrupt)
2670 d.addErrback(self.explain_web_error)
# t=check&repair=true over the same good/sick/dead/corrupt fixtures, HTML
# output only. Setup mirrors test_filecheck; the set_up_grid call,
# self.uris={} initialization, corrupt_share(cso) invocation and "return d"
# are among the lines missing from this listing.
2673 def test_repair_html(self):
2674 self.basedir = "web/Grid/repair_html"
2676 c0 = self.g.clients[0]
2679 d = c0.upload(upload.Data(DATA, convergence=""))
2680 def _stash_uri(ur, which):
2681 self.uris[which] = ur.uri
2682 d.addCallback(_stash_uri, "good")
2683 d.addCallback(lambda ign:
2684 c0.upload(upload.Data(DATA+"1", convergence="")))
2685 d.addCallback(_stash_uri, "sick")
2686 d.addCallback(lambda ign:
2687 c0.upload(upload.Data(DATA+"2", convergence="")))
2688 d.addCallback(_stash_uri, "dead")
2689 def _stash_mutable_uri(n, which):
2690 self.uris[which] = n.get_uri()
2691 assert isinstance(self.uris[which], str)
2692 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2693 d.addCallback(_stash_mutable_uri, "corrupt")
2695 def _compute_fileurls(ignored):
2697 for which in self.uris:
2698 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2699 d.addCallback(_compute_fileurls)
2701 def _clobber_shares(ignored):
2702 good_shares = self.find_shares(self.uris["good"])
2703 self.failUnlessEqual(len(good_shares), 10)
2704 sick_shares = self.find_shares(self.uris["sick"])
2705 os.unlink(sick_shares[0][2])
2706 dead_shares = self.find_shares(self.uris["dead"])
2707 for i in range(1, 10):
2708 os.unlink(dead_shares[i][2])
2709 c_shares = self.find_shares(self.uris["corrupt"])
2710 cso = CorruptShareOptions()
2711 cso.stdout = StringIO()
2712 cso.parseOptions([c_shares[0][2]])
2714 d.addCallback(_clobber_shares)
# healthy file: repair must be skipped
2716 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2717 def _got_html_good(res):
2718 self.failUnless("Healthy" in res, res)
2719 self.failIf("Not Healthy" in res, res)
2720 self.failUnless("No repair necessary" in res, res)
2721 d.addCallback(_got_html_good)
# one missing share: repair should succeed and report healthy
2723 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2724 def _got_html_sick(res):
2725 self.failUnless("Healthy : healthy" in res, res)
2726 self.failIf("Not Healthy" in res, res)
2727 self.failUnless("Repair successful" in res, res)
2728 d.addCallback(_got_html_sick)
2730 # repair of a dead file will fail, of course, but it isn't yet
2731 # clear how this should be reported. Right now it shows up as
2734 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2735 #def _got_html_dead(res):
2737 # self.failUnless("Healthy : healthy" in res, res)
2738 # self.failIf("Not Healthy" in res, res)
2739 # self.failUnless("No repair necessary" in res, res)
2740 #d.addCallback(_got_html_dead)
# corrupt mutable file: verify detects it, repair should fix it
2742 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2743 def _got_html_corrupt(res):
2744 self.failUnless("Healthy : Healthy" in res, res)
2745 self.failIf("Not Healthy" in res, res)
2746 self.failUnless("Repair successful" in res, res)
2747 d.addCallback(_got_html_corrupt)
2749 d.addErrback(self.explain_web_error)
# JSON flavor of the repair test: one sick file (one share deleted), then
# t=check&repair=true&output=json must report a successful repair with
# pre- and post-repair results. set_up_grid, self.uris={} and "return d"
# are among the lines missing from this listing.
2752 def test_repair_json(self):
2753 self.basedir = "web/Grid/repair_json"
2755 c0 = self.g.clients[0]
2758 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2759 def _stash_uri(ur, which):
2760 self.uris[which] = ur.uri
2761 d.addCallback(_stash_uri, "sick")
2763 def _compute_fileurls(ignored):
2765 for which in self.uris:
2766 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2767 d.addCallback(_compute_fileurls)
2769 def _clobber_shares(ignored):
2770 sick_shares = self.find_shares(self.uris["sick"])
2771 os.unlink(sick_shares[0][2])
2772 d.addCallback(_clobber_shares)
2774 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2775 def _got_json_sick(res):
2776 r = simplejson.loads(res)
2777 self.failUnlessEqual(r["repair-attempted"], True)
2778 self.failUnlessEqual(r["repair-successful"], True)
2779 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2780 "Not Healthy: 9 shares (enc 3-of-10)")
2781 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2782 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2783 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2784 d.addCallback(_got_json_sick)
2786 d.addErrback(self.explain_web_error)
# Build a directory tree (root + good/small/sick children), damage the "sick"
# file, and exercise the streaming deep-check webapi (t=stream-deep-check):
# one JSON unit per object, parent-first, plus a trailing stats unit. Then add
# an unrecoverable subdirectory and verify that both t=stream-manifest and
# t=stream-deep-check report the fatal error as an "ERROR:" line followed by a
# traceback instead of a JSON unit.
# NOTE(review): extraction gaps throughout — e.g. the `def _done(res):` header
# before the `units = [...]` comprehension, the heads of the `error_lines`
# comprehensions, and several closing-paren/convergence-argument lines of the
# add_file() calls are missing from this excerpt.
2789 def test_deep_check(self):
2790 self.basedir = "web/Grid/deep_check"
2792 c0 = self.g.clients[0]
2796 d = c0.create_empty_dirnode()
2797 def _stash_root_and_create_file(n):
# Stash the root dirnode's webapi URL, then populate it with a healthy
# immutable child.
2799 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2800 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2801 d.addCallback(_stash_root_and_create_file)
2802 def _stash_uri(fn, which):
# Remember each child node's cap under a symbolic name.
2803 self.uris[which] = fn.get_uri()
2805 d.addCallback(_stash_uri, "good")
2806 d.addCallback(lambda ign:
2807 self.rootnode.add_file(u"small",
2808 upload.Data("literal",
2810 d.addCallback(_stash_uri, "small")
2811 d.addCallback(lambda ign:
2812 self.rootnode.add_file(u"sick",
2813 upload.Data(DATA+"1",
2815 d.addCallback(_stash_uri, "sick")
2817 def _clobber_shares(ignored):
# Drop two of "sick"'s shares so it checks unhealthy but stays recoverable.
2818 self.delete_shares_numbered(self.uris["sick"], [0,1])
2819 d.addCallback(_clobber_shares)
2826 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# The stream is one JSON object per line: root, 3 files, then a stats unit.
2828 units = [simplejson.loads(line)
2829 for line in res.splitlines()
2831 self.failUnlessEqual(len(units), 4+1)
2832 # should be parent-first
2834 self.failUnlessEqual(u0["path"], [])
2835 self.failUnlessEqual(u0["type"], "directory")
2836 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2837 u0cr = u0["check-results"]
2838 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
2840 ugood = [u for u in units
2841 if u["type"] == "file" and u["path"] == [u"good"]][0]
2842 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2843 ugoodcr = ugood["check-results"]
2844 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
# The final unit is aggregate stats for the whole traversal.
2847 self.failUnlessEqual(stats["type"], "stats")
2849 self.failUnlessEqual(s["count-immutable-files"], 2)
2850 self.failUnlessEqual(s["count-literal-files"], 1)
2851 self.failUnlessEqual(s["count-directories"], 1)
2852 d.addCallback(_done)
2854 # now add root/subdir and root/subdir/grandchild, then make subdir
2855 # unrecoverable, then see what happens
2857 d.addCallback(lambda ign:
2858 self.rootnode.create_empty_directory(u"subdir"))
2859 d.addCallback(_stash_uri, "subdir")
2860 d.addCallback(lambda subdir_node:
2861 subdir_node.add_file(u"grandchild",
2862 upload.Data(DATA+"2",
2864 d.addCallback(_stash_uri, "grandchild")
2866 d.addCallback(lambda ign:
2867 self.delete_shares_numbered(self.uris["subdir"],
2874 # root/subdir [unrecoverable]
2875 # root/subdir/grandchild
2877 # how should a streaming-JSON API indicate fatal error?
2878 # answer: emit ERROR: instead of a JSON string
2880 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2881 def _check_broken_manifest(res):
# Everything before the first ERROR: line must still be valid JSON units;
# everything after it is the traceback text.
2882 lines = res.splitlines()
2884 for (i,line) in enumerate(lines)
2885 if line.startswith("ERROR:")]
2887 self.fail("no ERROR: in output: %s" % (res,))
2888 first_error = error_lines[0]
# NOTE(review): error_line is assigned but never used here (only
# error_msg/error_msg_s are asserted on) — dead local.
2889 error_line = lines[first_error]
2890 error_msg = lines[first_error+1:]
2891 error_msg_s = "\n".join(error_msg) + "\n"
2892 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2894 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2895 units = [simplejson.loads(line) for line in lines[:first_error]]
2896 self.failUnlessEqual(len(units), 5) # includes subdir
2897 last_unit = units[-1]
2898 self.failUnlessEqual(last_unit["path"], ["subdir"])
2899 d.addCallback(_check_broken_manifest)
2901 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
2902 def _check_broken_deepcheck(res):
# Same ERROR:-termination contract for deep-check; additionally the last
# emitted unit (the unrecoverable subdir) must report 0 recoverable
# versions and recoverable=False.
2903 lines = res.splitlines()
2905 for (i,line) in enumerate(lines)
2906 if line.startswith("ERROR:")]
2908 self.fail("no ERROR: in output: %s" % (res,))
2909 first_error = error_lines[0]
# NOTE(review): error_line unused here as well.
2910 error_line = lines[first_error]
2911 error_msg = lines[first_error+1:]
2912 error_msg_s = "\n".join(error_msg) + "\n"
2913 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
2915 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2916 units = [simplejson.loads(line) for line in lines[:first_error]]
2917 self.failUnlessEqual(len(units), 5) # includes subdir
2918 last_unit = units[-1]
2919 self.failUnlessEqual(last_unit["path"], ["subdir"])
2920 r = last_unit["check-results"]["results"]
2921 self.failUnlessEqual(r["count-recoverable-versions"], 0)
2922 self.failUnlessEqual(r["count-shares-good"], 1)
2923 self.failUnlessEqual(r["recoverable"], False)
2924 d.addCallback(_check_broken_deepcheck)
2926 d.addErrback(self.explain_web_error)
# Like test_deep_check, but drives t=stream-deep-check&repair=true: builds
# root + good/small/sick children, deletes one of "sick"'s shares, and then
# verifies the streamed check-and-repair units — no repair for the healthy
# objects, a successful 9->10-share repair for "sick", and a trailing stats
# unit. The commented-out "dead"/"corrupt" scaffolding is for cases this test
# does not yet cover.
# NOTE(review): extraction gaps — e.g. the `def _done(res):` header before the
# `units = [...]` comprehension and several closing-paren lines of the
# add_file() calls are missing from this excerpt.
2929 def test_deep_check_and_repair(self):
2930 self.basedir = "web/Grid/deep_check_and_repair"
2932 c0 = self.g.clients[0]
2936 d = c0.create_empty_dirnode()
2937 def _stash_root_and_create_file(n):
# Stash the root's webapi URL, then add a healthy immutable child.
2939 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2940 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2941 d.addCallback(_stash_root_and_create_file)
2942 def _stash_uri(fn, which):
2943 self.uris[which] = fn.get_uri()
2944 d.addCallback(_stash_uri, "good")
2945 d.addCallback(lambda ign:
2946 self.rootnode.add_file(u"small",
2947 upload.Data("literal",
2949 d.addCallback(_stash_uri, "small")
2950 d.addCallback(lambda ign:
2951 self.rootnode.add_file(u"sick",
2952 upload.Data(DATA+"1",
2954 d.addCallback(_stash_uri, "sick")
2955 #d.addCallback(lambda ign:
2956 # self.rootnode.add_file(u"dead",
2957 # upload.Data(DATA+"2",
2959 #d.addCallback(_stash_uri, "dead")
2961 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2962 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2963 #d.addCallback(_stash_uri, "corrupt")
2965 def _clobber_shares(ignored):
# Sanity-check "good" is fully healthy, then delete one of "sick"'s
# share files so it needs (and can get) a repair.
2966 good_shares = self.find_shares(self.uris["good"])
2967 self.failUnlessEqual(len(good_shares), 10)
2968 sick_shares = self.find_shares(self.uris["sick"])
2969 os.unlink(sick_shares[0][2])
2970 #dead_shares = self.find_shares(self.uris["dead"])
2971 #for i in range(1, 10):
2972 # os.unlink(dead_shares[i][2])
2974 #c_shares = self.find_shares(self.uris["corrupt"])
2975 #cso = CorruptShareOptions()
2976 #cso.stdout = StringIO()
2977 #cso.parseOptions([c_shares[0][2]])
2979 d.addCallback(_clobber_shares)
2982 # root/good CHK, 10 shares
2984 # root/sick CHK, 9 shares
2986 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
# One JSON unit per line: root, 3 files, then a stats unit.
2988 units = [simplejson.loads(line)
2989 for line in res.splitlines()
2991 self.failUnlessEqual(len(units), 4+1)
2992 # should be parent-first
2994 self.failUnlessEqual(u0["path"], [])
2995 self.failUnlessEqual(u0["type"], "directory")
2996 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2997 u0crr = u0["check-and-repair-results"]
2998 self.failUnlessEqual(u0crr["repair-attempted"], False)
2999 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3001 ugood = [u for u in units
3002 if u["type"] == "file" and u["path"] == [u"good"]][0]
3003 self.failUnlessEqual(ugood["cap"], self.uris["good"])
3004 ugoodcrr = ugood["check-and-repair-results"]
# NOTE(review): the next two assertions re-test u0crr (the root dir,
# already asserted above) instead of ugoodcrr, which is assigned but then
# never used — almost certainly a copy/paste slip; they presumably meant
# ugoodcrr. Fix when the full file is in view.
3005 self.failUnlessEqual(u0crr["repair-attempted"], False)
3006 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3008 usick = [u for u in units
3009 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3010 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3011 usickcrr = usick["check-and-repair-results"]
# "sick" must have been repaired from 9 good shares back up to 10.
3012 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3013 self.failUnlessEqual(usickcrr["repair-successful"], True)
3014 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3015 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3018 self.failUnlessEqual(stats["type"], "stats")
3020 self.failUnlessEqual(s["count-immutable-files"], 2)
3021 self.failUnlessEqual(s["count-literal-files"], 1)
3022 self.failUnlessEqual(s["count-directories"], 1)
3023 d.addCallback(_done)
3025 d.addErrback(self.explain_web_error)
# Helper for the lease tests: look up the cap stashed under `which`, walk its
# on-disk shares, and collect (sharefile-path, lease-count) pairs. Designed as
# a Deferred callback, hence the unused `ignored` first argument.
# NOTE(review): extraction gaps — the `lease_counts = []` initialiser and the
# `return lease_counts` line are missing from this excerpt; _assert_leasecount
# consumes the returned list.
3028 def _count_leases(self, ignored, which):
3029 u = self.uris[which]
3030 shares = self.find_shares(u)
3032 for shnum, serverid, fn in shares:
# get_share_file() wraps the raw share file; count its leases.
3033 sf = get_share_file(fn)
3034 num_leases = len(list(sf.get_leases()))
3035 lease_counts.append( (fn, num_leases) )
3038 def _assert_leasecount(self, lease_counts, expected):
3039 for (fn, num_leases) in lease_counts:
3040 if num_leases != expected:
3041 self.fail("expected %d leases, have %d, on %s" %
3042 (expected, num_leases, fn))
# Verify the add-lease=true webapi option: upload two immutable files and one
# mutable file, then run a sequence of t=check requests and assert the lease
# count on every share after each step. A plain t=check adds nothing; a
# t=check&add-lease=true from the uploading client only renews its existing
# lease; the same request from a second client (clientnum=1, different lease
# secrets) adds a second lease.
# NOTE(review): extraction gaps — e.g. the self.uris/self.fileurls
# initialisers, the `clientnum=1` continuation line of the final mutable
# CHECK, and the trailing `return d` are missing from this excerpt.
3044 def test_add_lease(self):
3045 self.basedir = "web/Grid/add_lease"
3046 self.set_up_grid(num_clients=2)
3047 c0 = self.g.clients[0]
3050 d = c0.upload(upload.Data(DATA, convergence=""))
3051 def _stash_uri(ur, which):
# Remember each immutable upload's cap under a symbolic name.
3052 self.uris[which] = ur.uri
3053 d.addCallback(_stash_uri, "one")
3054 d.addCallback(lambda ign:
3055 c0.upload(upload.Data(DATA+"1", convergence="")))
3056 d.addCallback(_stash_uri, "two")
3057 def _stash_mutable_uri(n, which):
# Mutable files hand back a node, not an upload-results object.
3058 self.uris[which] = n.get_uri()
3059 assert isinstance(self.uris[which], str)
3060 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3061 d.addCallback(_stash_mutable_uri, "mutable")
3063 def _compute_fileurls(ignored):
3065 for which in self.uris:
3066 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3067 d.addCallback(_compute_fileurls)
# Baseline: one lease (the uploader's) on every object.
3069 d.addCallback(self._count_leases, "one")
3070 d.addCallback(self._assert_leasecount, 1)
3071 d.addCallback(self._count_leases, "two")
3072 d.addCallback(self._assert_leasecount, 1)
3073 d.addCallback(self._count_leases, "mutable")
3074 d.addCallback(self._assert_leasecount, 1)
3076 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3077 def _got_html_good(res):
3078 self.failUnless("Healthy" in res, res)
3079 self.failIf("Not Healthy" in res, res)
3080 d.addCallback(_got_html_good)
# A check without add-lease must leave all lease counts untouched.
3082 d.addCallback(self._count_leases, "one")
3083 d.addCallback(self._assert_leasecount, 1)
3084 d.addCallback(self._count_leases, "two")
3085 d.addCallback(self._assert_leasecount, 1)
3086 d.addCallback(self._count_leases, "mutable")
3087 d.addCallback(self._assert_leasecount, 1)
3089 # this CHECK uses the original client, which uses the same
3090 # lease-secrets, so it will just renew the original lease
3091 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3092 d.addCallback(_got_html_good)
3094 d.addCallback(self._count_leases, "one")
3095 d.addCallback(self._assert_leasecount, 1)
3096 d.addCallback(self._count_leases, "two")
3097 d.addCallback(self._assert_leasecount, 1)
3098 d.addCallback(self._count_leases, "mutable")
3099 d.addCallback(self._assert_leasecount, 1)
3101 # this CHECK uses an alternate client, which adds a second lease
3102 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3103 d.addCallback(_got_html_good)
# Only "one" gains a lease; "two" and "mutable" were not touched.
3105 d.addCallback(self._count_leases, "one")
3106 d.addCallback(self._assert_leasecount, 2)
3107 d.addCallback(self._count_leases, "two")
3108 d.addCallback(self._assert_leasecount, 1)
3109 d.addCallback(self._count_leases, "mutable")
3110 d.addCallback(self._assert_leasecount, 1)
# Same renew-vs-add distinction for the mutable file: original client
# first (renews), alternate client second (adds).
3112 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3113 d.addCallback(_got_html_good)
3115 d.addCallback(self._count_leases, "one")
3116 d.addCallback(self._assert_leasecount, 2)
3117 d.addCallback(self._count_leases, "two")
3118 d.addCallback(self._assert_leasecount, 1)
3119 d.addCallback(self._count_leases, "mutable")
3120 d.addCallback(self._assert_leasecount, 1)
3122 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3124 d.addCallback(_got_html_good)
3126 d.addCallback(self._count_leases, "one")
3127 d.addCallback(self._assert_leasecount, 2)
3128 d.addCallback(self._count_leases, "two")
3129 d.addCallback(self._assert_leasecount, 1)
3130 d.addCallback(self._count_leases, "mutable")
3131 d.addCallback(self._assert_leasecount, 2)
3133 d.addErrback(self.explain_web_error)
# Verify add-lease=true on the *streaming deep-check* operation: build a small
# tree (root + one/small/mutable), then run t=stream-deep-check three times —
# without add-lease (counts stay at 1), with add-lease from the original
# client (renews only, counts stay at 1), and with add-lease from the second
# client (a new lease everywhere, counts go to 2).
# NOTE(review): extraction gaps — e.g. the self.uris/self.fileurls
# initialisers, the `def _done(res):` header before the `units = [...]`
# comprehension, the `clientnum=1` continuation of the final CHECK, and the
# trailing `return d` are missing from this excerpt.
3136 def test_deep_add_lease(self):
3137 self.basedir = "web/Grid/deep_add_lease"
3138 self.set_up_grid(num_clients=2)
3139 c0 = self.g.clients[0]
3143 d = c0.create_empty_dirnode()
3144 def _stash_root_and_create_file(n):
# Remember the root's cap/URL, then add a healthy immutable child.
3146 self.uris["root"] = n.get_uri()
3147 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3148 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3149 d.addCallback(_stash_root_and_create_file)
3150 def _stash_uri(fn, which):
3151 self.uris[which] = fn.get_uri()
3152 d.addCallback(_stash_uri, "one")
3153 d.addCallback(lambda ign:
3154 self.rootnode.add_file(u"small",
3155 upload.Data("literal",
3157 d.addCallback(_stash_uri, "small")
3159 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3160 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3161 d.addCallback(_stash_uri, "mutable")
3163 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
# Sanity-check the stream shape: one JSON unit per line.
3165 units = [simplejson.loads(line)
3166 for line in res.splitlines()
3168 # root, one, small, mutable, stats
3169 self.failUnlessEqual(len(units), 4+1)
3170 d.addCallback(_done)
# No add-lease: every object still has exactly the uploader's lease.
3172 d.addCallback(self._count_leases, "root")
3173 d.addCallback(self._assert_leasecount, 1)
3174 d.addCallback(self._count_leases, "one")
3175 d.addCallback(self._assert_leasecount, 1)
3176 d.addCallback(self._count_leases, "mutable")
3177 d.addCallback(self._assert_leasecount, 1)
3179 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3180 d.addCallback(_done)
# Same client, same lease secrets: add-lease only renews, counts unchanged.
3182 d.addCallback(self._count_leases, "root")
3183 d.addCallback(self._assert_leasecount, 1)
3184 d.addCallback(self._count_leases, "one")
3185 d.addCallback(self._assert_leasecount, 1)
3186 d.addCallback(self._count_leases, "mutable")
3187 d.addCallback(self._assert_leasecount, 1)
3189 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3191 d.addCallback(_done)
# Second client's secrets differ, so every traversed object gains a lease.
3193 d.addCallback(self._count_leases, "root")
3194 d.addCallback(self._assert_leasecount, 2)
3195 d.addCallback(self._count_leases, "one")
3196 d.addCallback(self._assert_leasecount, 2)
3197 d.addCallback(self._count_leases, "mutable")
3198 d.addCallback(self._assert_leasecount, 2)
3200 d.addErrback(self.explain_web_error)
# Exercise the webapi's error reporting: build files/directories with zero or
# one remaining share (plus a cap with a flipped key bit), then verify that
# each failure mode produces the right HTTP status and a sensible body —
# 410 Gone with a plain-text NotEnoughSharesError/UnrecoverableFileError
# explanation for unrecoverable objects, 404 for a missing child, a degraded
# HTML page for unreadable directories, and a 500 with an HTML or plain-text
# traceback (depending on the Accept header) for an unexpected server-side
# exception (ErrorBoom).
# NOTE(review): extraction gaps — e.g. the self.fileurls initialiser, the
# `def _stash_root(n):`/`def _stash_bad(ur):` headers, the mangled-dirnode
# URI computations, and the trailing `return d` are missing from this excerpt.
3204 def test_exceptions(self):
3205 self.basedir = "web/Grid/exceptions"
3206 self.set_up_grid(num_clients=1, num_servers=2)
3207 c0 = self.g.clients[0]
3210 d = c0.create_empty_dirnode()
3212 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3213 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
3215 d.addCallback(_stash_root)
3216 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# Keep one share ("1share"), then corrupt the cap's key to get a URI with
# zero findable shares ("0shares").
3218 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
3219 self.delete_shares_numbered(ur.uri, range(1,10))
3221 u = uri.from_string(ur.uri)
3222 u.key = testutil.flip_bit(u.key, 0)
3223 baduri = u.to_string()
3224 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
3225 d.addCallback(_stash_bad)
3226 d.addCallback(lambda ign: c0.create_empty_dirnode())
3227 def _mangle_dirnode_1share(n):
# A directory reduced to a single share: unrecoverable (needs 3).
3229 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
3230 self.fileurls["dir-1share-json"] = url + "?t=json"
3231 self.delete_shares_numbered(u, range(1,10))
3232 d.addCallback(_mangle_dirnode_1share)
3233 d.addCallback(lambda ign: c0.create_empty_dirnode())
3234 def _mangle_dirnode_0share(n):
# A directory with every share deleted.
3236 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
3237 self.fileurls["dir-0share-json"] = url + "?t=json"
3238 self.delete_shares_numbered(u, range(0,10))
3239 d.addCallback(_mangle_dirnode_0share)
3241 # NotEnoughSharesError should be reported sensibly, with a
3242 # text/plain explanation of the problem, and perhaps some
3243 # information on which shares *could* be found.
3245 d.addCallback(lambda ignored:
3246 self.shouldHTTPError("GET unrecoverable",
3247 410, "Gone", "NotEnoughSharesError",
3248 self.GET, self.fileurls["0shares"]))
3249 def _check_zero_shares(body):
# Body must be plain text (no HTML) and exactly the canonical message;
# whitespace is normalised before comparing.
3250 self.failIf("<html>" in body, body)
3251 body = " ".join(body.strip().split())
3252 exp = ("NotEnoughSharesError: no shares could be found. "
3253 "Zero shares usually indicates a corrupt URI, or that "
3254 "no servers were connected, but it might also indicate "
3255 "severe corruption. You should perform a filecheck on "
3256 "this object to learn more.")
3257 self.failUnlessEqual(exp, body)
3258 d.addCallback(_check_zero_shares)
3261 d.addCallback(lambda ignored:
3262 self.shouldHTTPError("GET 1share",
3263 410, "Gone", "NotEnoughSharesError",
3264 self.GET, self.fileurls["1share"]))
3265 def _check_one_share(body):
# With some-but-not-enough shares the message differs: it reports the
# found/needed counts instead of the zero-shares advice.
3266 self.failIf("<html>" in body, body)
3267 body = " ".join(body.strip().split())
3268 exp = ("NotEnoughSharesError: 1 share found, but we need "
3269 "3 to recover the file. This indicates that some "
3270 "servers were unavailable, or that shares have been "
3271 "lost to server departure, hard drive failure, or disk "
3272 "corruption. You should perform a filecheck on "
3273 "this object to learn more.")
3274 self.failUnlessEqual(exp, body)
3275 d.addCallback(_check_one_share)
3277 d.addCallback(lambda ignored:
3278 self.shouldHTTPError("GET imaginary",
3279 404, "Not Found", None,
3280 self.GET, self.fileurls["imaginary"]))
3281 def _missing_child(body):
3282 self.failUnless("No such child: imaginary" in body, body)
3283 d.addCallback(_missing_child)
3285 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
3286 def _check_0shares_dir_html(body):
3287 self.failUnless("<html>" in body, body)
3288 # we should see the regular page, but without the child table or
3290 body = " ".join(body.strip().split())
3291 self.failUnlessIn('href="?t=info">More info on this directory',
3293 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3294 "could not be retrieved, because there were insufficient "
3295 "good shares. This might indicate that no servers were "
3296 "connected, insufficient servers were connected, the URI "
3297 "was corrupt, or that shares have been lost due to server "
3298 "departure, hard drive failure, or disk corruption. You "
3299 "should perform a filecheck on this object to learn more.")
3300 self.failUnlessIn(exp, body)
3301 self.failUnlessIn("No upload forms: directory is unreadable", body)
3302 d.addCallback(_check_0shares_dir_html)
3304 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
3305 def _check_1shares_dir_html(body):
3306 # at some point, we'll split UnrecoverableFileError into 0-shares
3307 # and some-shares like we did for immutable files (since there
3308 # are different sorts of advice to offer in each case). For now,
3309 # they present the same way.
3310 self.failUnless("<html>" in body, body)
3311 body = " ".join(body.strip().split())
3312 self.failUnlessIn('href="?t=info">More info on this directory',
3314 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3315 "could not be retrieved, because there were insufficient "
3316 "good shares. This might indicate that no servers were "
3317 "connected, insufficient servers were connected, the URI "
3318 "was corrupt, or that shares have been lost due to server "
3319 "departure, hard drive failure, or disk corruption. You "
3320 "should perform a filecheck on this object to learn more.")
3321 self.failUnlessIn(exp, body)
3322 self.failUnlessIn("No upload forms: directory is unreadable", body)
3323 d.addCallback(_check_1shares_dir_html)
# The JSON (?t=json) views of the same broken directories must return a
# plain-text 410, not HTML.
3325 d.addCallback(lambda ignored:
3326 self.shouldHTTPError("GET dir-0share-json",
3327 410, "Gone", "UnrecoverableFileError",
3329 self.fileurls["dir-0share-json"]))
3330 def _check_unrecoverable_file(body):
3331 self.failIf("<html>" in body, body)
3332 body = " ".join(body.strip().split())
3333 exp = ("UnrecoverableFileError: the directory (or mutable file) "
3334 "could not be retrieved, because there were insufficient "
3335 "good shares. This might indicate that no servers were "
3336 "connected, insufficient servers were connected, the URI "
3337 "was corrupt, or that shares have been lost due to server "
3338 "departure, hard drive failure, or disk corruption. You "
3339 "should perform a filecheck on this object to learn more.")
3340 self.failUnlessEqual(exp, body)
3341 d.addCallback(_check_unrecoverable_file)
3343 d.addCallback(lambda ignored:
3344 self.shouldHTTPError("GET dir-1share-json",
3345 410, "Gone", "UnrecoverableFileError",
3347 self.fileurls["dir-1share-json"]))
3348 d.addCallback(_check_unrecoverable_file)
3350 d.addCallback(lambda ignored:
3351 self.shouldHTTPError("GET imaginary",
3352 404, "Not Found", None,
3353 self.GET, self.fileurls["imaginary"]))
3355 # attach a webapi child that throws a random error, to test how it
3357 w = c0.getServiceNamed("webish")
3358 w.root.putChild("ERRORBOOM", ErrorBoom())
3360 d.addCallback(lambda ignored:
3361 self.shouldHTTPError("GET errorboom_html",
3362 500, "Internal Server Error", None,
3363 self.GET, "ERRORBOOM"))
3364 def _internal_error_html(body):
3365 # test that a weird exception during a webapi operation with
3366 # Accept:*/* results in a text/html stack trace, while one
3367 # without that Accept: line gets us a text/plain stack trace
3368 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
3369 d.addCallback(_internal_error_html)
3371 d.addCallback(lambda ignored:
3372 self.shouldHTTPError("GET errorboom_text",
3373 500, "Internal Server Error", None,
3374 self.GET, "ERRORBOOM",
3375 headers={"accept": ["text/plain"]}))
3376 def _internal_error_text(body):
3377 # test that a weird exception during a webapi operation with
3378 # Accept:*/* results in a text/html stack trace, while one
3379 # without that Accept: line gets us a text/plain stack trace
3380 self.failIf("<html>" in body, body)
3381 self.failUnless(body.startswith("Traceback "), body)
3382 d.addCallback(_internal_error_text)
3384 def _flush_errors(res):
3385 # Trial: please ignore the CompletelyUnhandledError in the logs
3386 self.flushLoggedErrors(CompletelyUnhandledError)
3388 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Marker exception raised by ErrorBoom.

    Used by test_exceptions to verify that the webapi turns an
    unexpected server-side exception into a 500 response with an HTML
    or plain-text traceback (depending on the Accept header).
    """
class ErrorBoom(rend.Page):
    """A web resource whose rendering always blows up.

    test_exceptions mounts this at /ERRORBOOM to check how the webapi
    reports unexpected internal errors.
    """
    def beforeRender(self, ctx):
        # Explode before any rendering happens; the web layer must turn
        # this into a 500 Internal Server Error with a traceback body.
        boom = CompletelyUnhandledError("whoops")
        raise boom