1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
10 from allmydata import interfaces, uri, webish
11 from allmydata.storage.shares import get_share_file
12 from allmydata.storage_client import StorageFarmBroker
13 from allmydata.immutable import upload, download
14 from allmydata.unknown import UnknownNode
15 from allmydata.web import status, common
16 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
17 from allmydata.util import fileutil, base32
18 from allmydata.util.assertutil import precondition
19 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
20 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
21 from allmydata.interfaces import IURI, INewDirectoryURI, \
22 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode, \
24 from allmydata.mutable import servermap, publish, retrieve
25 import common_util as testutil
26 from allmydata.test.no_network import GridTestMixin
28 from allmydata.test.common_web import HTTPClientGETFactory, \
31 # create a fake uploader/downloader, and a couple of fake dirnodes, then
32 # create a webserver that works against them
34 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
# Minimal stand-in for the node's stats provider; FakeClient (below) exposes
# an instance of this as its stats_provider attribute.
36 class FakeStatsProvider:
# NOTE(review): listing line 37 is missing here (presumably the header of a
# get_stats()-style method returning the dict below) -- confirm in full file.
38 stats = {'stats': {}, 'counters': {}}
# A fake Tahoe client node: enough of the client API for webish.WebishServer
# and the status pages to run against, without any real storage or network.
41 class FakeClient(service.MultiService):
42 nodeid = "fake_nodeid"
43 nickname = "fake_nickname"
44 basedir = "fake_basedir"
45 def get_versions(self):
# NOTE(review): the rest of this version dict (listing lines 47-50) is
# missing from this sampled listing.
46 return {'allmydata': "fake",
51 introducer_furl = "None"
# One pre-built status object of each kind; the list_all_*_statuses
# accessors below hand these to the /status web pages.
53 _all_upload_status = [upload.UploadStatus()]
54 _all_download_status = [download.DownloadStatus()]
55 _all_mapupdate_statuses = [servermap.UpdateStatus()]
56 _all_publish_statuses = [publish.PublishStatus()]
57 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
58 convergence = "some random string"
59 stats_provider = FakeStatsProvider()
# NOTE(review): body missing (listing lines 62-63); presumably returns a
# constant truthy/falsy value for the welcome page.
61 def connected_to_introducer(self):
# Shared broker with no introducer client; permuted peer selection as in a
# real client.
64 storage_broker = StorageFarmBroker(None, permute_peers=True)
def get_storage_broker(self):
    """Accessor used by the web code to reach this fake node's broker."""
    broker = self.storage_broker
    return broker
# Map a cap string onto the matching Fake* node type, mirroring the real
# client's create_node_from_uri().
# NOTE(review): listing lines 69-70 are missing; they presumably fall back
# to *readcap* when *auri* is empty -- confirm against the full file.
68 def create_node_from_uri(self, auri, readcap=None):
71 precondition(isinstance(auri, str), auri)
72 u = uri.from_string(auri)
# Directory caps (read-write or read-only) become FakeDirectoryNodes.
73 if (INewDirectoryURI.providedBy(u)
74 or IReadonlyNewDirectoryURI.providedBy(u)):
75 return FakeDirectoryNode(self).init_from_uri(u)
76 if IFileURI.providedBy(u):
77 return FakeCHKFileNode(u, self)
78 if IMutableFileURI.providedBy(u):
79 return FakeMutableFileNode(self).init_from_uri(u)
# Anything else (e.g. verify-caps) is a recognized cap with no node type.
80 raise UnhandledCapTypeError("cap '%s' is recognized, but has no Node" % auri)
# Create a fresh fake directory node; fires the returned Deferred with it.
82 def create_empty_dirnode(self):
83 n = FakeDirectoryNode(self)
# NOTE(review): listing line 84 is missing -- presumably "d = n.create()";
# the trailing "return d" (line 86) is also absent from this listing.
85 d.addCallback(lambda res: n)
# Re-export the fake node's SDMF size limit so tests can reference it via
# the client object, as they would on a real client.
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
def create_mutable_file(self, contents=""):
    """Create a fake mutable file holding *contents*.

    Returns the Deferred from FakeMutableFileNode.create(), which fires
    with the new node.
    """
    return FakeMutableFileNode(self).create(contents)
# Fake of IClient.upload(): read the whole uploadable, stash it in a fake
# CHK filenode, and produce UploadResults carrying the new URI.
93 def upload(self, uploadable):
94 d = uploadable.get_size()
95 d.addCallback(lambda size: uploadable.read(size))
# NOTE(review): listing lines 96-97 are missing -- presumably a callback
# joining the read data and the "def _got_data(data):" header.
98 n = create_chk_filenode(self, data)
99 results = upload.UploadResults()
100 results.uri = n.get_uri()
# NOTE(review): "return results" (line 101) and the trailing "return d"
# (lines 103-104) are absent from this sampled listing.
102 d.addCallback(_got_data)
# Status-list accessors: the /status web pages iterate these; each list
# holds exactly one pre-built status object (see the class attributes).
105 def list_all_upload_statuses(self):
106 return self._all_upload_status
107 def list_all_download_statuses(self):
108 return self._all_download_status
109 def list_all_mapupdate_statuses(self):
110 return self._all_mapupdate_statuses
111 def list_all_publish_statuses(self):
112 return self._all_publish_statuses
113 def list_all_retrieve_statuses(self):
114 return self._all_retrieve_statuses
# NOTE(review): body missing (listing lines 116-117); presumably returns
# an empty list, since this fake runs no helper.
115 def list_all_helper_statuses(self):
# Shared fixture: starts a FakeClient plus a real WebishServer on an
# ephemeral port, then builds a small directory tree (public/foo/...,
# public/reedownlee/...) that the HTTP tests poke at.
118 class WebMixin(object):
# NOTE(review): the "def setUp(self):" header (listing line 119) is missing
# from this sampled listing; everything below is its body.
120 self.s = FakeClient()
121 self.s.startService()
122 self.staticdir = self.mktemp()
123 self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
124 s.setServiceParent(self.s)
# Port "0" above means OS-assigned; recover the real port for URL building.
125 self.webish_port = port = s.listener._port.getHost().port
126 self.webish_url = "http://localhost:%d" % port
# Six dirnodes: public root, private root, foo, empty, sub, rodir.
128 l = [ self.s.create_empty_dirnode() for x in range(6) ]
129 d = defer.DeferredList(l)
# NOTE(review): the callback header binding res / foo / rodir (listing
# lines 130, 135-137, 170-171) is missing from this sampled listing.
131 self.public_root = res[0][1]
132 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
133 self.public_url = "/uri/" + self.public_root.get_uri()
134 self.private_root = res[1][1]
138 self._foo_uri = foo.get_uri()
139 self._foo_readonly_uri = foo.get_readonly_uri()
140 self._foo_verifycap = foo.get_verify_cap().to_string()
141 # NOTE: we ignore the deferred on all set_uri() calls, because we
142 # know the fake nodes do these synchronously
143 self.public_root.set_uri(u"foo", foo.get_uri())
145 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
146 foo.set_uri(u"bar.txt", self._bar_txt_uri)
147 self._bar_txt_verifycap = n.get_verify_cap().to_string()
149 foo.set_uri(u"empty", res[3][1].get_uri())
150 sub_uri = res[4][1].get_uri()
151 self._sub_uri = sub_uri
152 foo.set_uri(u"sub", sub_uri)
153 sub = self.s.create_node_from_uri(sub_uri)
155 _ign, n, blocking_uri = self.makefile(1)
156 foo.set_uri(u"blockingfile", blocking_uri)
158 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
159 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
160 # still think of it as an umlaut
161 foo.set_uri(unicode_filename, self._bar_txt_uri)
163 _ign, n, baz_file = self.makefile(2)
164 self._baz_file_uri = baz_file
165 sub.set_uri(u"baz.txt", baz_file)
167 _ign, n, self._bad_file_uri = self.makefile(3)
168 # this uri should not be downloadable
# Deleting the backing contents makes any download of this cap fail.
169 del FakeCHKFileNode.all_contents[self._bad_file_uri]
172 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
173 rodir.set_uri(u"nor", baz_file)
# Tree sketch (partial -- some listing lines of the original ASCII map are
# missing from this sampled listing):
178 # public/foo/blockingfile
181 # public/foo/sub/baz.txt
183 # public/reedownlee/nor
184 self.NEWFILE_CONTENTS = "newfile contents\n"
# Capture bar.txt's metadata so tests can compare ctime/mtime later.
186 return foo.get_metadata_for(u"bar.txt")
188 def _got_metadata(metadata):
189 self._bar_txt_metadata = metadata
190 d.addCallback(_got_metadata)
def makefile(self, number):
    """Create an immutable CHK filenode whose contents embed *number*.

    Returns a (contents, node, uri) tuple.
    """
    data = "contents of file %s\n" % number
    node = create_chk_filenode(self.s, data)
    return data, node, node.get_uri()
# NOTE(review): the "def tearDown(self):" header (listing line 198) is
# missing; this line is its body -- stop the fake client service.
199 return self.s.stopService()
# Assert that *res* is exactly the bar.txt fixture contents.
201 def failUnlessIsBarDotTxt(self, res):
202 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
def failUnlessIsBarJSON(self, res):
    """Assert *res* is the t=json rendering of the immutable bar.txt node."""
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    kind, info = data[0], data[1]
    self.failUnlessEqual(kind, u"filenode")
    self.failUnless(isinstance(info, dict))
    self.failIf(info["mutable"])
    self.failIf("rw_uri" in info) # immutable files expose no write-cap
    self.failUnlessEqual(info["ro_uri"], self._bar_txt_uri)
    self.failUnlessEqual(info["verify_uri"], self._bar_txt_verifycap)
    self.failUnlessEqual(info["size"], len(self.BAR_CONTENTS))
# Assert that *res* is the t=json rendering of the mutable "foo" directory,
# including its expected children and their metadata.
215 def failUnlessIsFooJSON(self, res):
216 data = simplejson.loads(res)
217 self.failUnless(isinstance(data, list))
218 self.failUnlessEqual(data[0], "dirnode", res)
219 self.failUnless(isinstance(data[1], dict))
220 self.failUnless(data[1]["mutable"])
221 self.failUnless("rw_uri" in data[1]) # mutable
222 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
223 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
224 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
# Child names sorted for a deterministic comparison.
226 kidnames = sorted([unicode(n) for n in data[1]["children"]])
227 self.failUnlessEqual(kidnames,
228 [u"bar.txt", u"blockingfile", u"empty",
229 u"n\u00fc.txt", u"sub"])
# NOTE(review): listing line 231 ("for (name,value)") is missing between
# these two lines of the dict() comprehension -- sampled listing.
230 kids = dict( [(unicode(name),value)
232 in data[1]["children"].iteritems()] )
233 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
234 self.failUnless("metadata" in kids[u"sub"][1])
235 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
236 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
237 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
238 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
239 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
240 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
241 self._bar_txt_verifycap)
242 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
243 self._bar_txt_metadata["ctime"])
# NOTE(review): the second argument of this comparison (listing line 245)
# is missing -- presumably self._bar_txt_uri.
244 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
# HTTP GET against the test webish server; uses a custom factory so the
# test can see status code and response headers, not just the body.
# NOTE(review): the continuation of this signature (listing line 248,
# presumably "**kwargs):") is missing from this sampled listing.
247 def GET(self, urlpath, followRedirect=False, return_response=False,
249 # if return_response=True, this fires with (data, statuscode,
250 # respheaders) instead of just data.
251 assert not isinstance(urlpath, unicode)
252 url = self.webish_url + urlpath
253 factory = HTTPClientGETFactory(url, method="GET",
254 followRedirect=followRedirect, **kwargs)
255 reactor.connectTCP("localhost", self.webish_port, factory)
# NOTE(review): listing lines 256-257 (the return_response branch and the
# "def _got_data(data):" header) are missing here.
258 return (data, factory.status, factory.response_headers)
260 d.addCallback(_got_data)
261 return factory.deferred
# HTTP HEAD; needs its own factory because twisted.web.client.getPage
# discards response headers.
263 def HEAD(self, urlpath, return_response=False, **kwargs):
264 # this requires some surgery, because twisted.web.client doesn't want
265 # to give us back the response headers.
# NOTE(review): unlike GET above, the factory receives the bare urlpath
# rather than self.webish_url + urlpath -- confirm against the full file
# whether a "url = ..." line was dropped by the sampling.
266 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
267 reactor.connectTCP("localhost", self.webish_port, factory)
# NOTE(review): listing lines 268-269 and 271 (return_response branch and
# callback header) are missing here.
270 return (data, factory.status, factory.response_headers)
272 d.addCallback(_got_data)
273 return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """HTTP PUT of *data* to *urlpath* on the test webish server.

    Returns the Deferred from twisted.web.client.getPage, which fires
    with the response body.
    """
    return client.getPage(self.webish_url + urlpath, method="PUT",
                          postdata=data, **kwargs)
def DELETE(self, urlpath):
    """HTTP DELETE of *urlpath* on the test webish server."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
# HTTP POST of *fields* as a multipart/form-data body, the way a browser
# form submission would arrive; tuple values mean (filename, contents).
283 def POST(self, urlpath, followRedirect=False, **fields):
284 url = self.webish_url + urlpath
# Fixed multipart boundary; fine for tests since no field contains it.
285 sepbase = "boogabooga"
# NOTE(review): listing lines 286-288 and 290-292 (form list creation,
# boundary separators, and the _charset value) are missing here.
289 form.append('Content-Disposition: form-data; name="_charset"')
293 for name, value in fields.iteritems():
294 if isinstance(value, tuple):
# A tuple field is a file upload: (filename, contents).
295 filename, value = value
296 form.append('Content-Disposition: form-data; name="%s"; '
297 'filename="%s"' % (name, filename.encode("utf-8")))
# NOTE(review): the "else:" (line 298) above this branch is missing.
299 form.append('Content-Disposition: form-data; name="%s"' % name)
301 if isinstance(value, unicode):
302 value = value.encode("utf-8")
# NOTE(review): lines 303-304 (presumably "else: value = str(value)")
# and 306-308 (appending value + separators) are missing here.
305 assert isinstance(value, str)
309 body = "\r\n".join(form) + "\r\n"
# NOTE(review): the continuation of this headers dict (line 311) is
# missing from this sampled listing.
310 headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
312 return client.getPage(url, method="POST", postdata=body,
313 headers=headers, followRedirect=followRedirect)
# Errback-style checker: *res* must be a Failure wrapping
# *expected_failure*; optionally verify substrings of the message and of
# the HTTP response body.
315 def shouldFail(self, res, expected_failure, which,
316 substring=None, response_substring=None):
317 if isinstance(res, failure.Failure):
318 res.trap(expected_failure)
# NOTE(review): the "if substring:" guard (listing line 319) is missing
# above this check -- sampled listing.
320 self.failUnless(substring in str(res),
321 "substring '%s' not in '%s'"
322 % (substring, str(res)))
323 if response_substring:
324 self.failUnless(response_substring in res.value.response,
325 "response substring '%s' not in '%s'"
326 % (response_substring, res.value.response))
# NOTE(review): the "else:" (line 327) introducing this failure branch is
# missing -- this fires when no Failure was received at all.
328 self.fail("%s was supposed to raise %s, not get '%s'" %
329 (which, expected_failure, res))
# Call-style variant: invoke *callable* and require that the resulting
# Deferred errbacks with *expected_failure*.
# NOTE(review): line 332 ("response_substring=None,") is missing from this
# signature; response_substring is referenced below.
331 def shouldFail2(self, expected_failure, which, substring,
333 callable, *args, **kwargs):
334 assert substring is None or isinstance(substring, str)
335 assert response_substring is None or isinstance(response_substring, str)
336 d = defer.maybeDeferred(callable, *args, **kwargs)
# NOTE(review): the "def done(res):" header (line 337) and the "if
# substring:" guard (line 340) are missing here.
338 if isinstance(res, failure.Failure):
339 res.trap(expected_failure)
341 self.failUnless(substring in str(res),
342 "%s: substring '%s' not in '%s'"
343 % (which, substring, str(res)))
344 if response_substring:
345 self.failUnless(response_substring in res.value.response,
346 "%s: response substring '%s' not in '%s'"
# NOTE(review): line 347 ("% (which,") is missing from this argument list.
348 response_substring, res.value.response))
# NOTE(review): the "else:" (line 349) and the trailing
# "d.addBoth(done); return d" (lines 352-353) are missing here.
350 self.fail("%s was supposed to raise %s, not get '%s'" %
351 (which, expected_failure, res))
# Check that *res* is a web error.Error carrying HTTP status 404.
355 def should404(self, res, which):
356 if isinstance(res, failure.Failure):
357 res.trap(error.Error)
358 self.failUnlessEqual(res.value.status, "404")
# NOTE(review): the "else:" (line 359) and the format arguments (line 361)
# of this self.fail call are missing from this sampled listing.
360 self.fail("%s was supposed to Error(404), not get '%s'" %
364 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
365 def test_create(self):
368 def test_welcome(self):
371 self.failUnless('Welcome To TahoeLAFS' in res, res)
373 self.s.basedir = 'web/test_welcome'
374 fileutil.make_dirs("web/test_welcome")
375 fileutil.make_dirs("web/test_welcome/private")
377 d.addCallback(_check)
380 def test_provisioning(self):
381 d = self.GET("/provisioning/")
383 self.failUnless('Tahoe Provisioning Tool' in res)
384 fields = {'filled': True,
385 "num_users": int(50e3),
386 "files_per_user": 1000,
387 "space_per_user": int(1e9),
388 "sharing_ratio": 1.0,
389 "encoding_parameters": "3-of-10-5",
391 "ownership_mode": "A",
392 "download_rate": 100,
397 return self.POST("/provisioning/", **fields)
399 d.addCallback(_check)
401 self.failUnless('Tahoe Provisioning Tool' in res)
402 self.failUnless("Share space consumed: 167.01TB" in res)
404 fields = {'filled': True,
405 "num_users": int(50e6),
406 "files_per_user": 1000,
407 "space_per_user": int(5e9),
408 "sharing_ratio": 1.0,
409 "encoding_parameters": "25-of-100-50",
410 "num_servers": 30000,
411 "ownership_mode": "E",
412 "drive_failure_model": "U",
414 "download_rate": 1000,
419 return self.POST("/provisioning/", **fields)
420 d.addCallback(_check2)
422 self.failUnless("Share space consumed: huge!" in res)
423 fields = {'filled': True}
424 return self.POST("/provisioning/", **fields)
425 d.addCallback(_check3)
427 self.failUnless("Share space consumed:" in res)
428 d.addCallback(_check4)
431 def test_reliability_tool(self):
433 from allmydata import reliability
434 _hush_pyflakes = reliability
436 raise unittest.SkipTest("reliability tool requires NumPy")
438 d = self.GET("/reliability/")
440 self.failUnless('Tahoe Reliability Tool' in res)
441 fields = {'drive_lifetime': "8Y",
446 "check_period": "1M",
447 "report_period": "3M",
450 return self.POST("/reliability/", **fields)
452 d.addCallback(_check)
454 self.failUnless('Tahoe Reliability Tool' in res)
455 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
456 self.failUnless(re.search(r, res), res)
457 d.addCallback(_check2)
460 def test_status(self):
461 dl_num = self.s.list_all_download_statuses()[0].get_counter()
462 ul_num = self.s.list_all_upload_statuses()[0].get_counter()
463 mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
464 pub_num = self.s.list_all_publish_statuses()[0].get_counter()
465 ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
466 d = self.GET("/status", followRedirect=True)
468 self.failUnless('Upload and Download Status' in res, res)
469 self.failUnless('"down-%d"' % dl_num in res, res)
470 self.failUnless('"up-%d"' % ul_num in res, res)
471 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
472 self.failUnless('"publish-%d"' % pub_num in res, res)
473 self.failUnless('"retrieve-%d"' % ret_num in res, res)
474 d.addCallback(_check)
475 d.addCallback(lambda res: self.GET("/status/?t=json"))
476 def _check_json(res):
477 data = simplejson.loads(res)
478 self.failUnless(isinstance(data, dict))
479 active = data["active"]
480 # TODO: test more. We need a way to fake an active operation
482 d.addCallback(_check_json)
484 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
486 self.failUnless("File Download Status" in res, res)
487 d.addCallback(_check_dl)
488 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
490 self.failUnless("File Upload Status" in res, res)
491 d.addCallback(_check_ul)
492 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
493 def _check_mapupdate(res):
494 self.failUnless("Mutable File Servermap Update Status" in res, res)
495 d.addCallback(_check_mapupdate)
496 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
497 def _check_publish(res):
498 self.failUnless("Mutable File Publish Status" in res, res)
499 d.addCallback(_check_publish)
500 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
501 def _check_retrieve(res):
502 self.failUnless("Mutable File Retrieve Status" in res, res)
503 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """render_time/render_rate formatting, for both the download and the
    upload results renderer mixins (they share the same formatting rules)."""
    for rrm in [status.DownloadResultsRendererMixin(),
                status.UploadResultsRendererMixin()]:
        # None renders as empty; times pick s/ms/us by magnitude.
        self.failUnlessEqual(rrm.render_time(None, None), "")
        self.failUnlessEqual(rrm.render_time(None, 2.5), "2.50s")
        self.failUnlessEqual(rrm.render_time(None, 0.25), "250ms")
        self.failUnlessEqual(rrm.render_time(None, 0.0021), "2.1ms")
        self.failUnlessEqual(rrm.render_time(None, 0.000123), "123us")
        # Rates pick Bps/kBps/MBps by magnitude.
        self.failUnlessEqual(rrm.render_rate(None, None), "")
        self.failUnlessEqual(rrm.render_rate(None, 2500000), "2.50MBps")
        self.failUnlessEqual(rrm.render_rate(None, 30100), "30.1kBps")
        self.failUnlessEqual(rrm.render_rate(None, 123), "123Bps")
# Plain GET of an immutable file must return its exact contents.
530 def test_GET_FILEURL(self):
531 d = self.GET(self.public_url + "/foo/bar.txt")
532 d.addCallback(self.failUnlessIsBarDotTxt)
# NOTE(review): the trailing "return d" (listing line 533) is missing
# from this sampled listing.
# A Range request must yield 206 Partial Content, a matching
# Content-Range header, and only the requested byte span.
535 def test_GET_FILEURL_range(self):
536 headers = {"range": "bytes=1-10"}
537 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
538 return_response=True)
539 def _got((res, status, headers)):
540 self.failUnlessEqual(int(status), 206)
541 self.failUnless(headers.has_key("content-range"))
542 self.failUnlessEqual(headers["content-range"][0],
543 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
# bytes=1-10 is inclusive, hence the [1:11] slice.
544 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
548 def test_GET_FILEURL_partial_range(self):
549 headers = {"range": "bytes=5-"}
550 length = len(self.BAR_CONTENTS)
551 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
552 return_response=True)
553 def _got((res, status, headers)):
554 self.failUnlessEqual(int(status), 206)
555 self.failUnless(headers.has_key("content-range"))
556 self.failUnlessEqual(headers["content-range"][0],
557 "bytes 5-%d/%d" % (length-1, length))
558 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
562 def test_HEAD_FILEURL_range(self):
563 headers = {"range": "bytes=1-10"}
564 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
565 return_response=True)
566 def _got((res, status, headers)):
567 self.failUnlessEqual(res, "")
568 self.failUnlessEqual(int(status), 206)
569 self.failUnless(headers.has_key("content-range"))
570 self.failUnlessEqual(headers["content-range"][0],
571 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
575 def test_HEAD_FILEURL_partial_range(self):
576 headers = {"range": "bytes=5-"}
577 length = len(self.BAR_CONTENTS)
578 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
579 return_response=True)
580 def _got((res, status, headers)):
581 self.failUnlessEqual(int(status), 206)
582 self.failUnless(headers.has_key("content-range"))
583 self.failUnlessEqual(headers["content-range"][0],
584 "bytes 5-%d/%d" % (length-1, length))
588 def test_GET_FILEURL_range_bad(self):
589 headers = {"range": "BOGUS=fizbop-quarnak"}
590 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
592 "Syntactically invalid http range header",
593 self.GET, self.public_url + "/foo/bar.txt",
597 def test_HEAD_FILEURL(self):
598 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
599 def _got((res, status, headers)):
600 self.failUnlessEqual(res, "")
601 self.failUnlessEqual(headers["content-length"][0],
602 str(len(self.BAR_CONTENTS)))
603 self.failUnlessEqual(headers["content-type"], ["text/plain"])
607 def test_GET_FILEURL_named(self):
608 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
609 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
610 d = self.GET(base + "/@@name=/blah.txt")
611 d.addCallback(self.failUnlessIsBarDotTxt)
612 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
613 d.addCallback(self.failUnlessIsBarDotTxt)
614 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
615 d.addCallback(self.failUnlessIsBarDotTxt)
616 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
617 d.addCallback(self.failUnlessIsBarDotTxt)
618 save_url = base + "?save=true&filename=blah.txt"
619 d.addCallback(lambda res: self.GET(save_url))
620 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
621 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
622 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
623 u_url = base + "?save=true&filename=" + u_fn_e
624 d.addCallback(lambda res: self.GET(u_url))
625 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
628 def test_PUT_FILEURL_named_bad(self):
629 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
630 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
632 "/file can only be used with GET or HEAD",
633 self.PUT, base + "/@@name=/blah.txt", "")
636 def test_GET_DIRURL_named_bad(self):
637 base = "/file/%s" % urllib.quote(self._foo_uri)
638 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
641 self.GET, base + "/@@name=/blah.txt")
644 def test_GET_slash_file_bad(self):
645 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
647 "/file must be followed by a file-cap and a name",
651 def test_GET_unhandled_URI_named(self):
652 contents, n, newuri = self.makefile(12)
653 verifier_cap = n.get_verify_cap().to_string()
654 base = "/file/%s" % urllib.quote(verifier_cap)
655 # client.create_node_from_uri() can't handle verify-caps
656 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
658 "is not a valid file- or directory- cap",
662 def test_GET_unhandled_URI(self):
663 contents, n, newuri = self.makefile(12)
664 verifier_cap = n.get_verify_cap().to_string()
665 base = "/uri/%s" % urllib.quote(verifier_cap)
666 # client.create_node_from_uri() can't handle verify-caps
667 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
669 "is not a valid file- or directory- cap",
673 def test_GET_FILE_URI(self):
674 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
676 d.addCallback(self.failUnlessIsBarDotTxt)
679 def test_GET_FILE_URI_badchild(self):
680 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
681 errmsg = "Files have no children, certainly not named 'boguschild'"
682 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
683 "400 Bad Request", errmsg,
687 def test_PUT_FILE_URI_badchild(self):
688 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
689 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
690 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
691 "400 Bad Request", errmsg,
695 def test_GET_FILEURL_save(self):
696 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
697 # TODO: look at the headers, expect a Content-Disposition: attachment
699 d.addCallback(self.failUnlessIsBarDotTxt)
702 def test_GET_FILEURL_missing(self):
703 d = self.GET(self.public_url + "/foo/missing")
704 d.addBoth(self.should404, "test_GET_FILEURL_missing")
707 def test_PUT_NEWFILEURL(self):
708 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
709 # TODO: we lose the response code, so we can't check this
710 #self.failUnlessEqual(responsecode, 201)
711 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
712 d.addCallback(lambda res:
713 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
714 self.NEWFILE_CONTENTS))
717 def test_PUT_NEWFILEURL_not_mutable(self):
718 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
719 self.NEWFILE_CONTENTS)
720 # TODO: we lose the response code, so we can't check this
721 #self.failUnlessEqual(responsecode, 201)
722 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
723 d.addCallback(lambda res:
724 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
725 self.NEWFILE_CONTENTS))
728 def test_PUT_NEWFILEURL_range_bad(self):
729 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
730 target = self.public_url + "/foo/new.txt"
731 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
732 "501 Not Implemented",
733 "Content-Range in PUT not yet supported",
734 # (and certainly not for immutable files)
735 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
737 d.addCallback(lambda res:
738 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
741 def test_PUT_NEWFILEURL_mutable(self):
742 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
743 self.NEWFILE_CONTENTS)
744 # TODO: we lose the response code, so we can't check this
745 #self.failUnlessEqual(responsecode, 201)
747 u = uri.from_string_mutable_filenode(res)
748 self.failUnless(u.is_mutable())
749 self.failIf(u.is_readonly())
751 d.addCallback(_check_uri)
752 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
753 d.addCallback(lambda res:
754 self.failUnlessMutableChildContentsAre(self._foo_node,
756 self.NEWFILE_CONTENTS))
759 def test_PUT_NEWFILEURL_mutable_toobig(self):
760 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
761 "413 Request Entity Too Large",
762 "SDMF is limited to one segment, and 10001 > 10000",
764 self.public_url + "/foo/new.txt?mutable=true",
765 "b" * (self.s.MUTABLE_SIZELIMIT+1))
768 def test_PUT_NEWFILEURL_replace(self):
769 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
770 # TODO: we lose the response code, so we can't check this
771 #self.failUnlessEqual(responsecode, 200)
772 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
773 d.addCallback(lambda res:
774 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
775 self.NEWFILE_CONTENTS))
778 def test_PUT_NEWFILEURL_bad_t(self):
779 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
780 "PUT to a file: bad t=bogus",
781 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
785 def test_PUT_NEWFILEURL_no_replace(self):
786 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
787 self.NEWFILE_CONTENTS)
788 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
790 "There was already a child by that name, and you asked me "
794 def test_PUT_NEWFILEURL_mkdirs(self):
795 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
797 d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
798 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
799 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
800 d.addCallback(lambda res:
801 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
802 self.NEWFILE_CONTENTS))
805 def test_PUT_NEWFILEURL_blocked(self):
806 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
807 self.NEWFILE_CONTENTS)
808 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
810 "Unable to create directory 'blockingfile': a file was in the way")
813 def test_DELETE_FILEURL(self):
814 d = self.DELETE(self.public_url + "/foo/bar.txt")
815 d.addCallback(lambda res:
816 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
819 def test_DELETE_FILEURL_missing(self):
820 d = self.DELETE(self.public_url + "/foo/missing")
821 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
824 def test_DELETE_FILEURL_missing2(self):
825 d = self.DELETE(self.public_url + "/missing/missing")
826 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
829 def test_GET_FILEURL_json(self):
830 # twisted.web.http.parse_qs ignores any query args without an '=', so
831 # I can't do "GET /path?json", I have to do "GET /path/t=json"
832 # instead. This may make it tricky to emulate the S3 interface
834 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
835 d.addCallback(self.failUnlessIsBarJSON)
838 def test_GET_FILEURL_json_missing(self):
839 d = self.GET(self.public_url + "/foo/missing?json")
840 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
843 def test_GET_FILEURL_uri(self):
844 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
846 self.failUnlessEqual(res, self._bar_txt_uri)
847 d.addCallback(_check)
848 d.addCallback(lambda res:
849 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
851 # for now, for files, uris and readonly-uris are the same
852 self.failUnlessEqual(res, self._bar_txt_uri)
853 d.addCallback(_check2)
856 def test_GET_FILEURL_badtype(self):
857 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
860 self.public_url + "/foo/bar.txt?t=bogus")
863 def test_GET_FILEURL_uri_missing(self):
864 d = self.GET(self.public_url + "/foo/missing?t=uri")
865 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
868 def test_GET_DIRURL(self):
869 # the addSlash means we get a redirect here
870 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
872 d = self.GET(self.public_url + "/foo", followRedirect=True)
874 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
876 # the FILE reference points to a URI, but it should end in bar.txt
877 bar_url = ("%s/file/%s/@@named=/bar.txt" %
878 (ROOT, urllib.quote(self._bar_txt_uri)))
879 get_bar = "".join([r'<td>FILE</td>',
881 r'<a href="%s">bar.txt</a>' % bar_url,
883 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
885 self.failUnless(re.search(get_bar, res), res)
886 for line in res.split("\n"):
887 # find the line that contains the delete button for bar.txt
888 if ("form action" in line and
889 'value="delete"' in line and
890 'value="bar.txt"' in line):
891 # the form target should use a relative URL
892 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
893 self.failUnless(('action="%s"' % foo_url) in line, line)
894 # and the when_done= should too
895 #done_url = urllib.quote(???)
896 #self.failUnless(('name="when_done" value="%s"' % done_url)
900 self.fail("unable to find delete-bar.txt line", res)
902 # the DIR reference just points to a URI
903 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
904 get_sub = ((r'<td>DIR</td>')
905 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
906 self.failUnless(re.search(get_sub, res), res)
907 d.addCallback(_check)
909 # look at a directory which is readonly
910 d.addCallback(lambda res:
911 self.GET(self.public_url + "/reedownlee", followRedirect=True))
913 self.failUnless("(read-only)" in res, res)
914 self.failIf("Upload a file" in res, res)
915 d.addCallback(_check2)
917 # and at a directory that contains a readonly directory
918 d.addCallback(lambda res:
919 self.GET(self.public_url, followRedirect=True))
921 self.failUnless(re.search('<td>DIR-RO</td>'
922 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
923 d.addCallback(_check3)
925 # and an empty directory
926 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
928 self.failUnless("directory is empty" in res, res)
929 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
930 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
931 d.addCallback(_check4)
935 def test_GET_DIRURL_badtype(self):
936 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
940 self.public_url + "/foo?t=bogus")
943 def test_GET_DIRURL_json(self):
944 d = self.GET(self.public_url + "/foo?t=json")
945 d.addCallback(self.failUnlessIsFooJSON)
949 def test_POST_DIRURL_manifest_no_ophandle(self):
950 d = self.shouldFail2(error.Error,
951 "test_POST_DIRURL_manifest_no_ophandle",
953 "slow operation requires ophandle=",
954 self.POST, self.public_url, t="start-manifest")
def test_POST_DIRURL_manifest(self):
    """Exercise t=start-manifest in html, text, and JSON output forms."""
    d = defer.succeed(None)
    # getman kicks off the slow manifest operation under ophandle=125 and
    # then polls /operations/125 until it finishes.
    def getman(ignored, output):
        # NOTE(review): the continuation line of this POST call
        # (followRedirect=True) is elided in this listing.
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
        d.addCallback(self.wait_for_operation, "125")
        d.addCallback(self.get_operation_results, "125", output)
    d.addCallback(getman, None)
    def _got_html(manifest):
        # the html rendering names each child and its cap
        self.failUnless("Manifest of SI=" in manifest)
        self.failUnless("<td>sub</td>" in manifest)
        self.failUnless(self._sub_uri in manifest)
        self.failUnless("<td>sub/baz.txt</td>" in manifest)
    d.addCallback(_got_html)
    # both t=status and unadorned GET should be identical
    d.addCallback(lambda res: self.GET("/operations/125"))
    d.addCallback(_got_html)
    d.addCallback(getman, "html")
    d.addCallback(_got_html)
    d.addCallback(getman, "text")
    def _got_text(manifest):
        # text output is one "path cap" pair per line
        self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
        self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
    d.addCallback(_got_text)
    d.addCallback(getman, "JSON")
        # NOTE(review): the enclosing 'def _got_json(res):' header and the
        # 'got = {}' initialization are elided in this listing.
        data = res["manifest"]
        for (path_list, cap) in data:
            got[tuple(path_list)] = cap
        self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
        self.failUnless((u"sub",u"baz.txt") in got)
        self.failUnless("finished" in res)
        self.failUnless("origin" in res)
        self.failUnless("storage-index" in res)
        self.failUnless("verifycaps" in res)
        self.failUnless("stats" in res)
    d.addCallback(_got_json)
def test_POST_DIRURL_deepsize_no_ophandle(self):
    """start-deep-size without ophandle= must be rejected."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepsize_no_ophandle",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-size")

def test_POST_DIRURL_deepsize(self):
    """t=start-deep-size should report a plausible recursive size."""
    d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "126")
    d.addCallback(self.get_operation_results, "126", "json")
    def _got_json(data):
        self.failUnlessEqual(data["finished"], True)
        # NOTE(review): the binding of 'size' (presumably from data) is
        # elided in this listing.
        self.failUnless(size > 1000)
    d.addCallback(_got_json)
    d.addCallback(self.get_operation_results, "126", "text")
        # NOTE(review): the 'def _got_text(res):' header is elided here.
        mo = re.search(r'^size: (\d+)$', res, re.M)
        self.failUnless(mo, res)
        size = int(mo.group(1))
        # with directories, the size varies.
        self.failUnless(size > 1000)
    d.addCallback(_got_text)
def test_POST_DIRURL_deepstats_no_ophandle(self):
    """start-deep-stats without ophandle= must be rejected."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepstats_no_ophandle",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-stats")

def test_POST_DIRURL_deepstats(self):
    """t=start-deep-stats should return the expected per-type counters."""
    d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "127")
    d.addCallback(self.get_operation_results, "127", "json")
    def _got_json(stats):
        # exact counts depend on the fixture tree built in setUp
        expected = {"count-immutable-files": 3,
                    "count-mutable-files": 0,
                    "count-literal-files": 0,
                    "count-directories": 3,
                    "size-immutable-files": 57,
                    "size-literal-files": 0,
                    #"size-directories": 1912, # varies
                    #"largest-directory": 1590,
                    "largest-directory-children": 5,
                    "largest-immutable-file": 19,
        # NOTE(review): the closing of the 'expected' dict literal and the
        # message arguments below are partially elided in this listing.
        for k,v in expected.iteritems():
            self.failUnlessEqual(stats[k], v,
                                 "stats[%s] was %s, not %s" %
        self.failUnlessEqual(stats["size-files-histogram"],
    d.addCallback(_got_json)
def test_POST_DIRURL_stream_manifest(self):
    """t=stream-manifest returns newline-separated JSON units, ending
    with a 'stats' unit."""
    d = self.POST(self.public_url + "/foo/?t=stream-manifest")
        # NOTE(review): the 'def _check(res):' header and the binding of
        # 'first' (presumably units[0]) are elided in this listing.
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessEqual(len(units), 7)
        self.failUnlessEqual(units[-1]["type"], "stats")
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(first["cap"], self._foo_uri)
        self.failUnlessEqual(first["type"], "directory")
        baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
        self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
        self.failIfEqual(baz["storage-index"], None)
        self.failIfEqual(baz["verifycap"], None)
        self.failIfEqual(baz["repaircap"], None)
    d.addCallback(_check)
def test_GET_DIRURL_uri(self):
    """GET ?t=uri on a directory should return its read-write URI.

    Fixes two defects visible in this block: the callback `_check` was
    referenced but never defined (NameError at runtime), and the Deferred
    was not returned, so trial could not wait for the assertions to run.
    """
    d = self.GET(self.public_url + "/foo?t=uri")
    def _check(res):
        self.failUnlessEqual(res, self._foo_uri)
    d.addCallback(_check)
    # return the Deferred so trial waits for the chain to complete
    return d
def test_GET_DIRURL_readonly_uri(self):
    """GET ?t=readonly-uri on a directory should return its RO URI.

    Fixes two defects visible in this block: `_check` was referenced but
    never defined, and the Deferred was not returned to trial.
    """
    d = self.GET(self.public_url + "/foo?t=readonly-uri")
    def _check(res):
        self.failUnlessEqual(res, self._foo_readonly_uri)
    d.addCallback(_check)
    # return the Deferred so trial waits for the chain to complete
    return d
def test_PUT_NEWDIRURL(self):
    """PUT ?t=mkdir on a fresh name under /foo creates an empty
    subdirectory named 'newdir'.

    Fix: the Deferred chain was built but never returned, so trial would
    not wait for (or report failures from) the assertions.
    """
    d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    # the new directory must exist and be empty
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_PUT_NEWDIRURL_exists(self):
    """PUT ?t=mkdir on an existing directory is a no-op: the old child
    (containing baz.txt) must survive."""
    d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    # NOTE(review): 'return d' is not visible here -- confirm in full file.

def test_PUT_NEWDIRURL_blocked(self):
    """mkdir through a path blocked by an existing *file* must 409, and
    leave the sibling 'sub' directory untouched."""
    # NOTE(review): the 'self.PUT,' callable argument line of shouldFail2
    # is elided in this listing.
    d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
                         "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
                         self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"sub"))
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
def test_PUT_NEWDIRURL_mkdir_p(self):
    """t=mkdir-p&path=... should create intermediate directories and be
    idempotent (a second call returns the same URI)."""
    d = defer.succeed(None)
    d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
    d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
    d.addCallback(lambda res: self._foo_node.get(u"mkp"))
    def mkdir_p(mkpnode):
        url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
        # NOTE(review): the 'd = self.POST(url)' line appears to be elided
        # in this listing.
        def made_subsub(ssuri):
            d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
            d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
            # NOTE(review): the second (idempotent) POST between these two
            # callbacks is elided in this listing.
            d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
        d.addCallback(made_subsub)
    d.addCallback(mkdir_p)

def test_PUT_NEWDIRURL_mkdirs(self):
    """PUT ?t=mkdir on a two-deep missing path creates the intermediate
    'subdir' and the leaf 'newdir'."""
    d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
    d.addCallback(lambda res:
                  self._foo_node.get_child_at_path(u"subdir/newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
def test_DELETE_DIRURL(self):
    """DELETE on a directory URL unlinks it from its parent.

    Fix: the Deferred was never returned, so trial would not wait for the
    deletion and the failIfNodeHasChild assertion to complete.
    """
    d = self.DELETE(self.public_url + "/foo")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    return d
def test_DELETE_DIRURL_missing(self):
    """DELETE of a nonexistent child should 404 and leave 'foo' intact."""
    d = self.DELETE(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"foo"))

def test_DELETE_DIRURL_missing2(self):
    """DELETE of a nonexistent top-level name should 404."""
    d = self.DELETE(self.public_url + "/missing")
    d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")

def dump_root(self):
    # Debugging helper: walk the whole public root and print each child.
    # NOTE(review): the print statements inside 'visitor' and the
    # 'return d' are elided in this listing.
    w = webish.DirnodeWalkerMixin()
    def visitor(childpath, childnode, metadata):
    d = w.walk(self.public_root, visitor)
def failUnlessNodeKeysAre(self, node, expected_keys):
    """Assert that 'node' has exactly the children named in
    'expected_keys' (unicode names, order-insensitive)."""
    for k in expected_keys:
        assert isinstance(k, unicode)
    # NOTE(review): the binding of 'd' (presumably 'd = node.list()') and
    # the trailing 'return d' are elided in this listing.
    def _check(children):
        self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
    d.addCallback(_check)

def failUnlessNodeHasChild(self, node, name):
    """Assert that 'node' has a child with the given unicode name."""
    assert isinstance(name, unicode)
    # NOTE(review): the binding of 'd' is elided in this listing.
    def _check(children):
        self.failUnless(name in children)
    d.addCallback(_check)

def failIfNodeHasChild(self, node, name):
    """Assert that 'node' has NO child with the given unicode name."""
    assert isinstance(name, unicode)
    # NOTE(review): the binding of 'd' is elided in this listing.
    def _check(children):
        self.failIf(name in children)
    d.addCallback(_check)
def failUnlessChildContentsAre(self, node, name, expected_contents):
    """Assert that the immutable child at unicode path 'name' under
    'node' downloads to exactly 'expected_contents'.

    Fix: return the Deferred -- callers (e.g. the upload tests) chain on
    this helper, and without the return the assertion could complete
    after the test ends, or its failure could be dropped.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_to_data())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    return d
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
    """Assert that the mutable child at unicode path 'name' under 'node'
    downloads (best recoverable version) to exactly 'expected_contents'.

    Fix: return the Deferred so callers that chain on this helper
    actually wait for the download and the assertion.
    """
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
    d.addCallback(lambda node: node.download_best_version())
    def _check(contents):
        self.failUnlessEqual(contents, expected_contents)
    d.addCallback(_check)
    return d
def failUnlessChildURIIs(self, node, name, expected_uri):
    """Assert that the child at unicode path 'name' has the given URI."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
        # NOTE(review): the 'def _check(child):' header and 'return d'
        # are elided in this listing.
        self.failUnlessEqual(child.get_uri(), expected_uri.strip())
    d.addCallback(_check)

def failUnlessURIMatchesChild(self, got_uri, node, name):
    """Assert that 'got_uri' (e.g. a web response body) matches the URI
    of the child at unicode path 'name'."""
    assert isinstance(name, unicode)
    d = node.get_child_at_path(name)
        # NOTE(review): the 'def _check(child):' header and 'return d'
        # are elided in this listing.
        self.failUnlessEqual(got_uri.strip(), child.get_uri())
    d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake CHK grid holds 'contents' under 'got_uri'.

    Fix: use failUnlessEqual instead of failUnless on an '==' expression,
    so a mismatch reports both the stored and the expected bytes instead
    of just 'false is not true'.
    """
    self.failUnlessEqual(FakeCHKFileNode.all_contents[got_uri], contents)
def test_POST_upload(self):
    """POST t=upload into /foo should link the new child and store its
    contents."""
    # NOTE(review): the binding of 'fn' (presumably 'fn = self._foo_node')
    # and 'return d' are elided in this listing.
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_POST_upload_unicode(self):
    """Upload under a non-ASCII filename, then GET it back via the
    UTF-8-encoded URL."""
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    # NOTE(review): 'fn' binding and the tail of the final failUnlessEqual
    # call are elided in this listing.
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=(filename, self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,

def test_POST_upload_unicode_named(self):
    """Like test_POST_upload_unicode, but the non-ASCII name comes from
    the name= form field, overriding the uploaded filename."""
    filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    # NOTE(review): the name= argument line of this POST, the 'fn'
    # binding, and the tail of the final assertion are elided here.
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("overridden", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, filename,
                                                  self.NEWFILE_CONTENTS))
    target_url = self.public_url + "/foo/" + filename.encode("utf-8")
    d.addCallback(lambda res: self.GET(target_url))
    d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                        self.NEWFILE_CONTENTS,

def test_POST_upload_no_link(self):
    """POST t=upload to /uri (no parent dir) returns an upload-results
    page from which the new URI can be scraped."""
    d = self.POST("/uri", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check_upload_results(page):
        # this should be a page which describes the results of the upload
        # that just finished.
        self.failUnless("Upload Results:" in page)
        self.failUnless("URI:" in page)
        uri_re = re.compile("URI: <tt><span>(.*)</span>")
        mo = uri_re.search(page)
        self.failUnless(mo, page)
        new_uri = mo.group(1)
        # NOTE(review): the 'return new_uri' line appears to be elided.
    d.addCallback(_check_upload_results)
    d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
def test_POST_upload_no_link_whendone(self):
    """when_done=/ should redirect to the root after an unlinked upload."""
    d = self.POST("/uri", t="upload", when_done="/",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/")

def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
    """Invoke 'callable' and assert it redirects; pass the status code
    and Location target to 'checker', returning its result."""
    d = defer.maybeDeferred(callable, *args, **kwargs)
        # NOTE(review): the 'def _checked(res):' style header and the
        # addBoth/return lines are elided in this listing.
        if isinstance(res, failure.Failure):
            res.trap(error.PageRedirect)
            statuscode = res.value.status
            target = res.value.location
            return checker(statuscode, target)
        self.fail("%s: callable was supposed to redirect, not return '%s'"

def test_POST_upload_no_link_whendone_results(self):
    """when_done=/uri/%(uri)s should redirect to the new file itself."""
    def check(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.startswith(self.webish_url), target)
        return client.getPage(target, method="GET")
    # NOTE(review): the 'check,' checker argument line of shouldRedirect2
    # is elided in this listing.
    d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
                             self.POST, "/uri", t="upload",
                             when_done="/uri/%(uri)s",
                             file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
def test_POST_upload_no_link_mutable(self):
    """Unlinked mutable upload: the returned URI must be a mutable-file
    URI, downloadable via /uri/ and /file/."""
    d = self.POST("/uri", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    def _check(new_uri):
        new_uri = new_uri.strip()
        self.new_uri = new_uri
        # NOTE(review): the binding of 'u' (presumably
        # 'u = IURI(new_uri)' or similar) is elided in this listing.
        self.failUnless(IMutableFileURI.providedBy(u))
        self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(new_uri)
        return n.download_best_version()
    d.addCallback(_check)
        # NOTE(review): the 'def _check2(data):' header is elided here.
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        return self.GET("/uri/%s" % urllib.quote(self.new_uri))
    d.addCallback(_check2)
        # NOTE(review): the 'def _check3(data):' header is elided here.
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        return self.GET("/file/%s" % urllib.quote(self.new_uri))
    d.addCallback(_check3)
        # NOTE(review): the 'def _check4(data):' header is elided here.
        self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
    d.addCallback(_check4)

def test_POST_upload_no_link_mutable_toobig(self):
    """Unlinked mutable uploads beyond MUTABLE_SIZELIMIT must 413."""
    # NOTE(review): the 'self.POST,' callable line and the 'file=' tuple
    # opening are elided in this listing.
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_no_link_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         "/uri", t="upload", mutable="true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
def test_POST_upload_mutable(self):
    """End-to-end mutable-file lifecycle through the webapi: create via
    POST, overwrite via POST and PUT (URI must stay stable), check the
    directory listing, JSON forms, t=uri/t=readonly-uri, /uri/ access,
    HEAD size, and the overwrite size limit."""
    # this creates a mutable file
    # NOTE(review): the binding of 'fn' (presumably 'fn = self._foo_node')
    # and several callback-def headers are elided throughout this listing.
    d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                         self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        # NOTE(review): 'def _got(newnode):' header elided here.
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        # remember the node/URI so later stages can check stability
        self._mutable_node = newnode
        self._mutable_uri = newnode.get_uri()
    # now upload it again and make sure that the URI doesn't change
    NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
    d.addCallback(lambda res:
                  self.POST(self.public_url + "/foo", t="upload",
                            file=("new.txt", NEWER_CONTENTS)))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        # NOTE(review): 'def _got2(newnode):' header elided here.
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
    d.addCallback(_got2)
    # upload a second time, using PUT instead of POST
    NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(fn, u"new.txt",
    # finally list the directory, since mutable files are displayed
    # slightly differently
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/",
                           followRedirect=True))
    def _check_page(res):
        # TODO: assert more about the contents
        self.failUnless("SSK" in res)
    d.addCallback(_check_page)
    d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        # NOTE(review): 'def _got3(newnode):' header elided here.
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
    d.addCallback(_got3)
    # look at the JSON form of the enclosing directory
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/?t=json",
                           followRedirect=True))
    def _check_page_json(res):
        parsed = simplejson.loads(res)
        self.failUnlessEqual(parsed[0], "dirnode")
        children = dict( [(unicode(name),value)
                          in parsed[1]["children"].iteritems()] )
        self.failUnless("new.txt" in children)
        new_json = children["new.txt"]
        self.failUnlessEqual(new_json[0], "filenode")
        self.failUnless(new_json[1]["mutable"])
        self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
    d.addCallback(_check_page_json)
    # and the JSON form of the file
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=json"))
    def _check_file_json(res):
        parsed = simplejson.loads(res)
        self.failUnlessEqual(parsed[0], "filenode")
        self.failUnless(parsed[1]["mutable"])
        self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
    d.addCallback(_check_file_json)
    # and look at t=uri and t=readonly-uri
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=uri"))
    d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
    d.addCallback(lambda res:
                  self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
    def _check_ro_uri(res):
        ro_uri = unicode(self._mutable_node.get_readonly().to_string())
        self.failUnlessEqual(res, ro_uri)
    d.addCallback(_check_ro_uri)
    # make sure we can get to it from /uri/URI
    d.addCallback(lambda res:
                  self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
    d.addCallback(lambda res:
                  self.failUnlessEqual(res, NEW2_CONTENTS))
    # and that HEAD computes the size correctly
    d.addCallback(lambda res:
                  self.HEAD(self.public_url + "/foo/new.txt",
                            return_response=True))
    def _got_headers((res, status, headers)):
        self.failUnlessEqual(res, "")
        self.failUnlessEqual(headers["content-length"][0],
                             str(len(NEW2_CONTENTS)))
        self.failUnlessEqual(headers["content-type"], ["text/plain"])
    d.addCallback(_got_headers)
    # make sure that size errors are displayed correctly for overwrite
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error,
                                   "test_POST_upload_mutable-toobig",
                                   "413 Request Entity Too Large",
                                   "SDMF is limited to one segment, and 10001 > 10000",
                                   self.public_url + "/foo", t="upload",
                                   "b" * (self.s.MUTABLE_SIZELIMIT+1)),
    d.addErrback(self.dump_error)
def test_POST_upload_mutable_toobig(self):
    """Linked mutable uploads beyond MUTABLE_SIZELIMIT must 413."""
    # NOTE(review): the which= string below says "no_link" -- it looks
    # copy-pasted from the previous test; consider renaming it to
    # "test_POST_upload_mutable_toobig" in the full file.
    # NOTE(review): the 'self.POST,' callable line and the 'file=' tuple
    # opening are elided in this listing.
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_no_link_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.public_url + "/foo",
                         t="upload", mutable="true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )

def dump_error(self, f):
    """Errback helper: print the HTTP body hidden inside a web Error."""
    # if the web server returns an error code (like 400 Bad Request),
    # web.client.getPage puts the HTTP response body into the .response
    # attribute of the exception object that it gives back. It does not
    # appear in the Failure's repr(), so the ERROR that trial displays
    # will be rather terse and unhelpful. addErrback this method to the
    # end of your chain to get more information out of these errors.
    if f.check(error.Error):
        print "web.error.Error:"
        print f.value.response
    # NOTE(review): the trailing 'return f' (pass the failure along) is
    # elided in this listing.
def test_POST_upload_replace(self):
    """Uploading over an existing name replaces the child's contents."""
    # NOTE(review): the binding of 'fn' and 'return d' are elided in
    # this listing.
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))

def test_POST_upload_no_replace_ok(self):
    """replace=false is fine when the name does not already exist."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessEqual(res,
                                                   self.NEWFILE_CONTENTS))

def test_POST_upload_no_replace_queryarg(self):
    """replace=false (query arg) over an existing name must fail and
    leave the original bar.txt intact."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    # NOTE(review): the "409 Conflict" argument line is elided here.
    d.addBoth(self.shouldFail, error.Error,
              "POST_upload_no_replace_queryarg",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)

def test_POST_upload_no_replace_field(self):
    """replace=false (form field) over an existing name must fail and
    leave the original bar.txt intact."""
    d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)

def test_POST_upload_whendone(self):
    """when_done= should redirect after the upload completes."""
    d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/THERE")
    # NOTE(review): the binding of 'fn' is elided in this listing.
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_POST_upload_named(self):
    """The name= field sets the child name when file= carries raw data."""
    # NOTE(review): the binding of 'fn' is elided in this listing.
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="new.txt", file=self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_named_badfilename(self):
    """name= containing a slash must be rejected, adding nothing."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
    # NOTE(review): the status-code argument line and the closing of this
    # addBoth call are elided in this listing.
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_upload_named_badfilename",
              "name= may not contain a slash",
    # make sure that nothing was added
    # NOTE(review): the remaining expected child names and 'return d'
    # are elided in this listing.
    d.addCallback(lambda res:
                  self.failUnlessNodeKeysAre(self._foo_node,
                                             [u"bar.txt", u"blockingfile",
                                              u"empty", u"n\u00fc.txt",
def test_POST_FILEURL_check(self):
    """t=check on a healthy file: html, when_done redirect, return_to
    link, and JSON output."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check")
        # NOTE(review): the 'def _check(res):' header is elided here.
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
        # NOTE(review): the 'def _check3(res):' header is elided here.
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)

def test_POST_FILEURL_check_and_repair(self):
    """t=check&repair=true on a healthy file: same surface as t=check."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check", repair="true")
        # NOTE(review): the 'def _check(res):' header is elided here.
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
        # NOTE(review): the 'def _check3(res):' header is elided here.
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
def test_POST_DIRURL_check(self):
    """t=check on a healthy directory: html, when_done redirect,
    return_to link, and JSON output."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check")
        # NOTE(review): the 'def _check(res):' header is elided here.
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
        # NOTE(review): the 'def _check3(res):' header is elided here.
        self.failUnless("Healthy :" in res, res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)

def test_POST_DIRURL_check_and_repair(self):
    """t=check&repair=true on a healthy directory."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check", repair="true")
        # NOTE(review): the 'def _check(res):' header is elided here.
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnlessEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
        # NOTE(review): the 'def _check3(res):' header is elided here.
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
def wait_for_operation(self, ignored, ophandle):
    """Poll /operations/OPHANDLE (JSON) once a second until the slow
    operation reports finished, then deliver the parsed status."""
    url = "/operations/" + ophandle
    url += "?t=status&output=JSON"
        # NOTE(review): the GET of 'url', the '_got(res)' header, and the
        # return/recursion tail are elided in this listing.
        data = simplejson.loads(res)
        if not data["finished"]:
            # not done yet: stall one second and poll again
            d = self.stall(delay=1.0)
            d.addCallback(self.wait_for_operation, ophandle)

def get_operation_results(self, ignored, ophandle, output=None):
    """Fetch /operations/OPHANDLE results; parse JSON when output=json,
    otherwise return the raw body."""
    url = "/operations/" + ophandle
    # NOTE(review): the '?t=status' suffix line, the GET, and the
    # '_got(res)' header are elided in this listing.
        url += "&output=" + output
        if output and output.lower() == "json":
            return simplejson.loads(res)

def test_POST_DIRURL_deepcheck_no_ophandle(self):
    """start-deep-check without ophandle= must be rejected."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepcheck_no_ophandle",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-check")
def test_POST_DIRURL_deepcheck(self):
    """t=start-deep-check: redirect to the ophandle page, poll to
    completion, then inspect JSON, html, per-SI detail pages, and the
    404 for a bogus SI."""
    def _check_redirect(statuscode, target):
        self.failUnlessEqual(statuscode, str(http.FOUND))
        self.failUnless(target.endswith("/operations/123"))
    d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                             self.POST, self.public_url,
                             t="start-deep-check", ophandle="123")
    d.addCallback(self.wait_for_operation, "123")
    def _check_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnlessEqual(data["count-objects-checked"], 8)
        self.failUnlessEqual(data["count-objects-healthy"], 8)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "123", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy: <span>8</span>" in res)
    d.addCallback(_check_html)
    d.addCallback(lambda res:
                  self.GET("/operations/123/"))
    d.addCallback(_check_html) # should be the same as without the slash
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "one", "404 Not Found",
                                   "No detailed results for SI bogus",
                                   self.GET, "/operations/123/bogus"))
    foo_si = self._foo_node.get_storage_index()
    foo_si_s = base32.b2a(foo_si)
    d.addCallback(lambda res:
                  self.GET("/operations/123/%s?output=JSON" % foo_si_s))
    def _check_foo_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data["storage-index"], foo_si_s)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_foo_json)

def test_POST_DIRURL_deepcheck_and_repair(self):
    """Deep-check with repair on a healthy tree: every pre/post-repair
    counter should show 8 healthy objects and zero repairs."""
    d = self.POST(self.public_url, t="start-deep-check", repair="true",
                  ophandle="124", output="json", followRedirect=True)
    d.addCallback(self.wait_for_operation, "124")
    def _check_json(data):
        self.failUnlessEqual(data["finished"], True)
        self.failUnlessEqual(data["count-objects-checked"], 8)
        self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
        self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
        self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
        self.failUnlessEqual(data["count-repairs-attempted"], 0)
        self.failUnlessEqual(data["count-repairs-successful"], 0)
        self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
        self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
        self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
        self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "124", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
        self.failUnless("Repairs Attempted: <span>0</span>" in res)
        self.failUnless("Repairs Successful: <span>0</span>" in res)
        self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
        self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
    d.addCallback(_check_html)
def test_POST_FILEURL_bad_t(self):
    """POSTing an unknown t= to a file URL must 400."""
    # NOTE(review): the 't="bogus"' argument line, closing paren, and
    # 'return d' are elided in this listing.
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to file: bad t=bogus",
                         self.POST, self.public_url + "/foo/bar.txt",
def test_POST_mkdir(self): # return value?
    """POST t=mkdir with name= creates an empty child directory.

    Fix: return the Deferred so trial waits for the mkdir and the
    empty-directory assertion to complete.
    """
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    # the new directory must exist and be empty
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_2(self):
    """POST to CHILD?t=mkdir creates the named child directory."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # NOTE(review): 'return d' is not visible in this listing (same for
    # the following methods in this group) -- confirm in the full file.

def test_POST_mkdirs_2(self):
    """POST ?t=mkdir on a two-deep missing path creates intermediates."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])

def test_POST_mkdir_no_parentdir_noredirect(self):
    """Unlinked mkdir returns a parseable new-directory URI."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # parsing will raise if the response is not a valid DIR2 URI
        uri.NewDirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)

def test_POST_mkdir_no_parentdir_redirect(self):
    """redirect_to_result=true: 303 to the /uri/URI:DIR2: page."""
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)

def test_POST_noparent_bad(self):
    """An unknown t= on /uri must 400 with a helpful message."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
def test_welcome_page_mkdir_button(self):
    """Scrape the mkdir form off the welcome page, submit it, and expect
    a 303 redirect to the new directory."""
    # Fetch the welcome page.
    # NOTE(review): the 'd = self.GET("/")' line and the binding of
    # 'formt' (mo.group(2)) are elided in this listing.
    def _after_get_welcome_page(res):
        MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*?<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create a directory" />', re.I)
        mo = MKDIR_BUTTON_RE.search(res)
        formaction = mo.group(1)
        formaname = mo.group(3)
        formavalue = mo.group(4)
        return (formaction, formt, formaname, formavalue)
    d.addCallback(_after_get_welcome_page)
    def _after_parse_form(res):
        # replay the scraped form as a query-string POST
        (formaction, formt, formaname, formavalue) = res
        return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
    d.addCallback(_after_parse_form)
    d.addBoth(self.shouldRedirect, None, statuscode='303')
1910 def test_POST_mkdir_replace(self): # return value?
1911 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1912 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1913 d.addCallback(self.failUnlessNodeKeysAre, [])
1916 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1917 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1918 d.addBoth(self.shouldFail, error.Error,
1919 "POST_mkdir_no_replace_queryarg",
1921 "There was already a child by that name, and you asked me "
1922 "to not replace it")
1923 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1924 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1927 def test_POST_mkdir_no_replace_field(self): # return value?
1928 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1930 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1932 "There was already a child by that name, and you asked me "
1933 "to not replace it")
1934 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1935 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1938 def test_POST_mkdir_whendone_field(self):
1939 d = self.POST(self.public_url + "/foo",
1940 t="mkdir", name="newdir", when_done="/THERE")
1941 d.addBoth(self.shouldRedirect, "/THERE")
1942 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1943 d.addCallback(self.failUnlessNodeKeysAre, [])
1946 def test_POST_mkdir_whendone_queryarg(self):
1947 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1948 t="mkdir", name="newdir")
1949 d.addBoth(self.shouldRedirect, "/THERE")
1950 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1951 d.addCallback(self.failUnlessNodeKeysAre, [])
1954 def test_POST_bad_t(self):
1955 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1956 "POST to a directory with bad t=BOGUS",
1957 self.POST, self.public_url + "/foo", t="BOGUS")
1960 def test_POST_set_children(self):
1961 contents9, n9, newuri9 = self.makefile(9)
1962 contents10, n10, newuri10 = self.makefile(10)
1963 contents11, n11, newuri11 = self.makefile(11)
1966 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1969 "ctime": 1002777696.7564139,
1970 "mtime": 1002777696.7564139
1973 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1976 "ctime": 1002777696.7564139,
1977 "mtime": 1002777696.7564139
1980 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1983 "ctime": 1002777696.7564139,
1984 "mtime": 1002777696.7564139
1987 }""" % (newuri9, newuri10, newuri11)
1989 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
1991 d = client.getPage(url, method="POST", postdata=reqbody)
1993 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
1994 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
1995 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
1997 d.addCallback(_then)
1998 d.addErrback(self.dump_error)
2001 def test_POST_put_uri(self):
2002 contents, n, newuri = self.makefile(8)
2003 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2004 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2005 d.addCallback(lambda res:
2006 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2010 def test_POST_put_uri_replace(self):
2011 contents, n, newuri = self.makefile(8)
2012 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2013 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2014 d.addCallback(lambda res:
2015 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2019 def test_POST_put_uri_no_replace_queryarg(self):
2020 contents, n, newuri = self.makefile(8)
2021 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2022 name="bar.txt", uri=newuri)
2023 d.addBoth(self.shouldFail, error.Error,
2024 "POST_put_uri_no_replace_queryarg",
2026 "There was already a child by that name, and you asked me "
2027 "to not replace it")
2028 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2029 d.addCallback(self.failUnlessIsBarDotTxt)
2032 def test_POST_put_uri_no_replace_field(self):
2033 contents, n, newuri = self.makefile(8)
2034 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2035 name="bar.txt", uri=newuri)
2036 d.addBoth(self.shouldFail, error.Error,
2037 "POST_put_uri_no_replace_field",
2039 "There was already a child by that name, and you asked me "
2040 "to not replace it")
2041 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2042 d.addCallback(self.failUnlessIsBarDotTxt)
# POST t=delete should unlink the named child from the directory: after
# the POST, listing the parent must no longer contain "bar.txt".
# NOTE(review): mangled paste (stray line-number prefixes, indentation
# lost). The customary trailing "return d" of these trial tests appears
# to have been dropped -- confirm against VCS.
2045 def test_POST_delete(self):
2046 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2047 d.addCallback(lambda res: self._foo_node.list())
2048 def _check(children):
2049 self.failIf(u"bar.txt" in children)
2050 d.addCallback(_check)
2053 def test_POST_rename_file(self):
2054 d = self.POST(self.public_url + "/foo", t="rename",
2055 from_name="bar.txt", to_name='wibble.txt')
2056 d.addCallback(lambda res:
2057 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2058 d.addCallback(lambda res:
2059 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2060 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2061 d.addCallback(self.failUnlessIsBarDotTxt)
2062 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2063 d.addCallback(self.failUnlessIsBarJSON)
# Renaming a file to its own name must be a no-op: the child is still
# present afterwards, and both its contents and its t=json rendering are
# unchanged.
# NOTE(review): mangled paste (stray line-number prefixes, indentation
# lost); the trailing "return d" appears to have been dropped -- confirm
# against VCS.
2066 def test_POST_rename_file_redundant(self):
2067 d = self.POST(self.public_url + "/foo", t="rename",
2068 from_name="bar.txt", to_name='bar.txt')
2069 d.addCallback(lambda res:
2070 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2071 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2072 d.addCallback(self.failUnlessIsBarDotTxt)
2073 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2074 d.addCallback(self.failUnlessIsBarJSON)
2077 def test_POST_rename_file_replace(self):
2078 # rename a file and replace a directory with it
2079 d = self.POST(self.public_url + "/foo", t="rename",
2080 from_name="bar.txt", to_name='empty')
2081 d.addCallback(lambda res:
2082 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2083 d.addCallback(lambda res:
2084 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2085 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2086 d.addCallback(self.failUnlessIsBarDotTxt)
2087 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2088 d.addCallback(self.failUnlessIsBarJSON)
2091 def test_POST_rename_file_no_replace_queryarg(self):
2092 # rename a file and replace a directory with it
2093 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2094 from_name="bar.txt", to_name='empty')
2095 d.addBoth(self.shouldFail, error.Error,
2096 "POST_rename_file_no_replace_queryarg",
2098 "There was already a child by that name, and you asked me "
2099 "to not replace it")
2100 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2101 d.addCallback(self.failUnlessIsEmptyJSON)
2104 def test_POST_rename_file_no_replace_field(self):
2105 # rename a file and replace a directory with it
2106 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2107 from_name="bar.txt", to_name='empty')
2108 d.addBoth(self.shouldFail, error.Error,
2109 "POST_rename_file_no_replace_field",
2111 "There was already a child by that name, and you asked me "
2112 "to not replace it")
2113 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2114 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that 'res' is the t=json rendering of an empty directory.

    'res' must decode (via simplejson, imported at module level outside
    this excerpt) to a two-element list ["dirnode", {...}] whose
    "children" table contains no entries.
    """
    # Restored from a mangled paste: stray line-number prefixes stripped
    # and indentation reconstructed; the statements themselves are
    # unchanged.
    data = simplejson.loads(res)
    self.failUnlessEqual(data[0], "dirnode", data)
    self.failUnlessEqual(len(data[1]["children"]), 0)
2122 def test_POST_rename_file_slash_fail(self):
2123 d = self.POST(self.public_url + "/foo", t="rename",
2124 from_name="bar.txt", to_name='kirk/spock.txt')
2125 d.addBoth(self.shouldFail, error.Error,
2126 "test_POST_rename_file_slash_fail",
2128 "to_name= may not contain a slash",
2130 d.addCallback(lambda res:
2131 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2134 def test_POST_rename_dir(self):
2135 d = self.POST(self.public_url, t="rename",
2136 from_name="foo", to_name='plunk')
2137 d.addCallback(lambda res:
2138 self.failIfNodeHasChild(self.public_root, u"foo"))
2139 d.addCallback(lambda res:
2140 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2141 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2142 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """Assert that 'res' is a PageRedirect failure; return its Location.

    If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished
    with that HTTP status code.

    'res' is the result of a GET/POST Deferred (so this is usable as an
    addBoth target); 'which' labels any failure message. Returns the
    redirect location so callers can chain further checks on it.
    """
    # Restored from a mangled paste: stray line-number prefixes stripped
    # and indentation reconstructed; logic is unchanged.
    if not isinstance(res, failure.Failure):
        to_where = (target is None) and "somewhere" or ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    # trap() re-raises anything that is not a PageRedirect
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessEqual(res.value.status, statuscode,
                             "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        realtarget = self.webish_url + target
        self.failUnlessEqual(res.value.location, realtarget,
                             "%s: wrong target" % which)
    return res.value.location
2165 def test_GET_URI_form(self):
2166 base = "/uri?uri=%s" % self._bar_txt_uri
2167 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2168 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2170 d.addBoth(self.shouldRedirect, targetbase)
2171 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2172 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2173 d.addCallback(lambda res: self.GET(base+"&t=json"))
2174 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2175 d.addCallback(self.log, "about to get file by uri")
2176 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2177 d.addCallback(self.failUnlessIsBarDotTxt)
2178 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2179 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2180 followRedirect=True))
2181 d.addCallback(self.failUnlessIsFooJSON)
2182 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= query argument must fail with 400 Bad Request.
# NOTE(review): mangled paste; the tail of the shouldFail2 call (the
# callable and its arguments, presumably self.GET, "/uri") and the
# trailing "return d" appear to have been dropped -- confirm against VCS.
2186 def test_GET_URI_form_bad(self):
2187 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2188 "400 Bad Request", "GET /uri requires uri=",
2192 def test_GET_rename_form(self):
2193 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2194 followRedirect=True)
2196 self.failUnless('name="when_done" value="."' in res, res)
2197 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2198 d.addCallback(_check)
2201 def log(self, res, msg):
2202 #print "MSG: %s RES: %s" % (msg, res)
2206 def test_GET_URI_URL(self):
2207 base = "/uri/%s" % self._bar_txt_uri
2209 d.addCallback(self.failUnlessIsBarDotTxt)
2210 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2211 d.addCallback(self.failUnlessIsBarDotTxt)
2212 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2213 d.addCallback(self.failUnlessIsBarDotTxt)
2216 def test_GET_URI_URL_dir(self):
2217 base = "/uri/%s?t=json" % self._foo_uri
2219 d.addCallback(self.failUnlessIsFooJSON)
2222 def test_GET_URI_URL_missing(self):
2223 base = "/uri/%s" % self._bad_file_uri
2224 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2225 http.GONE, None, "NotEnoughSharesError",
2227 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2228 # here? we must arrange for a download to fail after target.open()
2229 # has been called, and then inspect the response to see that it is
2230 # shorter than we expected.
2233 def test_PUT_DIRURL_uri(self):
2234 d = self.s.create_empty_dirnode()
2236 new_uri = dn.get_uri()
2237 # replace /foo with a new (empty) directory
2238 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2239 d.addCallback(lambda res:
2240 self.failUnlessEqual(res.strip(), new_uri))
2241 d.addCallback(lambda res:
2242 self.failUnlessChildURIIs(self.public_root,
2246 d.addCallback(_made_dir)
2249 def test_PUT_DIRURL_uri_noreplace(self):
2250 d = self.s.create_empty_dirnode()
2252 new_uri = dn.get_uri()
2253 # replace /foo with a new (empty) directory, but ask that
2254 # replace=false, so it should fail
2255 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2256 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2258 self.public_url + "/foo?t=uri&replace=false",
2260 d.addCallback(lambda res:
2261 self.failUnlessChildURIIs(self.public_root,
2265 d.addCallback(_made_dir)
2268 def test_PUT_DIRURL_bad_t(self):
2269 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2270 "400 Bad Request", "PUT to a directory",
2271 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2272 d.addCallback(lambda res:
2273 self.failUnlessChildURIIs(self.public_root,
2278 def test_PUT_NEWFILEURL_uri(self):
2279 contents, n, new_uri = self.makefile(8)
2280 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2281 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2282 d.addCallback(lambda res:
2283 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2287 def test_PUT_NEWFILEURL_uri_replace(self):
2288 contents, n, new_uri = self.makefile(8)
2289 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2290 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2291 d.addCallback(lambda res:
2292 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2296 def test_PUT_NEWFILEURL_uri_no_replace(self):
2297 contents, n, new_uri = self.makefile(8)
2298 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2299 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2301 "There was already a child by that name, and you asked me "
2302 "to not replace it")
2305 def test_PUT_NEWFILE_URI(self):
2306 file_contents = "New file contents here\n"
2307 d = self.PUT("/uri", file_contents)
2309 assert isinstance(uri, str), uri
2310 self.failUnless(uri in FakeCHKFileNode.all_contents)
2311 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2313 return self.GET("/uri/%s" % uri)
2314 d.addCallback(_check)
2316 self.failUnlessEqual(res, file_contents)
2317 d.addCallback(_check2)
2320 def test_PUT_NEWFILE_URI_not_mutable(self):
2321 file_contents = "New file contents here\n"
2322 d = self.PUT("/uri?mutable=false", file_contents)
2324 assert isinstance(uri, str), uri
2325 self.failUnless(uri in FakeCHKFileNode.all_contents)
2326 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2328 return self.GET("/uri/%s" % uri)
2329 d.addCallback(_check)
2331 self.failUnlessEqual(res, file_contents)
2332 d.addCallback(_check2)
2335 def test_PUT_NEWFILE_URI_only_PUT(self):
2336 d = self.PUT("/uri?t=bogus", "")
2337 d.addBoth(self.shouldFail, error.Error,
2338 "PUT_NEWFILE_URI_only_PUT",
2340 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2343 def test_PUT_NEWFILE_URI_mutable(self):
2344 file_contents = "New file contents here\n"
2345 d = self.PUT("/uri?mutable=true", file_contents)
2346 def _check_mutable(uri):
2349 self.failUnless(IMutableFileURI.providedBy(u))
2350 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2351 n = self.s.create_node_from_uri(uri)
2352 return n.download_best_version()
2353 d.addCallback(_check_mutable)
2354 def _check2_mutable(data):
2355 self.failUnlessEqual(data, file_contents)
2356 d.addCallback(_check2_mutable)
2360 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2361 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2363 return self.GET("/uri/%s" % uri)
2364 d.addCallback(_check)
2366 self.failUnlessEqual(res, file_contents)
2367 d.addCallback(_check2)
2370 def test_PUT_mkdir(self):
2371 d = self.PUT("/uri?t=mkdir", "")
2373 n = self.s.create_node_from_uri(uri.strip())
2374 d2 = self.failUnlessNodeKeysAre(n, [])
2375 d2.addCallback(lambda res:
2376 self.GET("/uri/%s?t=json" % uri))
2378 d.addCallback(_check)
2379 d.addCallback(self.failUnlessIsEmptyJSON)
2382 def test_POST_check(self):
2383 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2385 # this returns a string form of the results, which are probably
2386 # None since we're using fake filenodes.
2387 # TODO: verify that the check actually happened, by changing
2388 # FakeCHKFileNode to count how many times .check() has been
2391 d.addCallback(_done)
# An HTTP method the webserver does not implement (here "BOGUS") must be
# answered with 501 Not Implemented, checked via shouldHTTPError around
# twisted.web.client.getPage.
# NOTE(review): mangled paste (stray line-number prefixes, indentation
# lost); the trailing "return d" appears to have been dropped -- confirm
# against VCS.
2394 def test_bad_method(self):
2395 url = self.webish_url + self.public_url + "/foo/bar.txt"
2396 d = self.shouldHTTPError("test_bad_method",
2397 501, "Not Implemented",
2398 "I don't know how to treat a BOGUS request.",
2399 client.getPage, url, method="BOGUS")
2402 def test_short_url(self):
2403 url = self.webish_url + "/uri"
2404 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2405 "I don't know how to treat a DELETE request.",
2406 client.getPage, url, method="DELETE")
# Querying the status of an operation handle that was never created must
# yield 404 with an "unknown/expired handle" message.
# NOTE(review): mangled paste (stray line-number prefixes, indentation
# lost); the trailing "return d" appears to have been dropped -- confirm
# against VCS.
2409 def test_ophandle_bad(self):
2410 url = self.webish_url + "/operations/bogus?t=status"
2411 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2412 "unknown/expired handle 'bogus'",
2413 client.getPage, url)
2416 def test_ophandle_cancel(self):
2417 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2418 followRedirect=True)
2419 d.addCallback(lambda ignored:
2420 self.GET("/operations/128?t=status&output=JSON"))
2422 data = simplejson.loads(res)
2423 self.failUnless("finished" in data, res)
2424 monitor = self.ws.root.child_operations.handles["128"][0]
2425 d = self.POST("/operations/128?t=cancel&output=JSON")
2427 data = simplejson.loads(res)
2428 self.failUnless("finished" in data, res)
2429 # t=cancel causes the handle to be forgotten
2430 self.failUnless(monitor.is_cancelled())
2431 d.addCallback(_check2)
2433 d.addCallback(_check1)
2434 d.addCallback(lambda ignored:
2435 self.shouldHTTPError("test_ophandle_cancel",
2436 404, "404 Not Found",
2437 "unknown/expired handle '128'",
2439 "/operations/128?t=status&output=JSON"))
2442 def test_ophandle_retainfor(self):
2443 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2444 followRedirect=True)
2445 d.addCallback(lambda ignored:
2446 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2448 data = simplejson.loads(res)
2449 self.failUnless("finished" in data, res)
2450 d.addCallback(_check1)
2451 # the retain-for=0 will cause the handle to be expired very soon
2452 d.addCallback(self.stall, 2.0)
2453 d.addCallback(lambda ignored:
2454 self.shouldHTTPError("test_ophandle_retainfor",
2455 404, "404 Not Found",
2456 "unknown/expired handle '129'",
2458 "/operations/129?t=status&output=JSON"))
2461 def test_ophandle_release_after_complete(self):
2462 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2463 followRedirect=True)
2464 d.addCallback(self.wait_for_operation, "130")
2465 d.addCallback(lambda ignored:
2466 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2467 # the release-after-complete=true will cause the handle to be expired
2468 d.addCallback(lambda ignored:
2469 self.shouldHTTPError("test_ophandle_release_after_complete",
2470 404, "404 Not Found",
2471 "unknown/expired handle '130'",
2473 "/operations/130?t=status&output=JSON"))
2476 def test_incident(self):
2477 d = self.POST("/report_incident", details="eek")
2479 self.failUnless("Thank you for your report!" in res, res)
2480 d.addCallback(_done)
2483 def test_static(self):
2484 webdir = os.path.join(self.staticdir, "subdir")
2485 fileutil.make_dirs(webdir)
2486 f = open(os.path.join(webdir, "hello.txt"), "wb")
2490 d = self.GET("/static/subdir/hello.txt")
2492 self.failUnlessEqual(res, "hello")
2493 d.addCallback(_check)
class Util(unittest.TestCase):
    """Unit tests for the small formatting helpers in allmydata.web.

    Restored from a mangled paste: stray line-number prefixes stripped
    and indentation reconstructed. The assertions are unchanged.
    """

    def test_abbreviate_time(self):
        # None means "no data available" and renders as the empty string.
        self.failUnlessEqual(common.abbreviate_time(None), "")
        self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")

    def test_abbreviate_rate(self):
        self.failUnlessEqual(common.abbreviate_rate(None), "")
        self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        # Sizes use decimal (1000-based) units.
        self.failUnlessEqual(common.abbreviate_size(None), "")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        # NOTE(review): the two nested 'def' header lines were dropped by
        # whatever mangled this paste; they are reconstructed here from
        # their bodies and call sites -- confirm against VCS.
        def convert(s):
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessEqual(convert(0), "0 seconds")
        self.failUnlessEqual(convert(1), "1 second")
        self.failUnlessEqual(convert(2), "2 seconds")
        def convert2(s):
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessEqual(convert2([]), "has shares: ")
        self.failUnlessEqual(convert2(["1"]), "has share: 1")
        self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2531 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
def CHECK(self, ign, which, args, clientnum=0):
    """POST the check/repair query 'args' to the stashed URL for 'which'.

    'ign' is an ignored chained-Deferred result, so this method can be
    used directly as an addCallback target. 'which' indexes
    self.fileurls (populated earlier in each test); 'args' is the raw
    query string (e.g. "t=check&repair=true"). Returns the Deferred from
    self.GET, firing with the response body.

    Restored from a mangled paste: stray line-number prefixes stripped
    and indentation reconstructed; logic is unchanged.
    """
    fileurl = self.fileurls[which]
    url = fileurl + "?" + args
    return self.GET(url, method="POST", clientnum=clientnum)
2538 def test_filecheck(self):
2539 self.basedir = "web/Grid/filecheck"
2541 c0 = self.g.clients[0]
2544 d = c0.upload(upload.Data(DATA, convergence=""))
2545 def _stash_uri(ur, which):
2546 self.uris[which] = ur.uri
2547 d.addCallback(_stash_uri, "good")
2548 d.addCallback(lambda ign:
2549 c0.upload(upload.Data(DATA+"1", convergence="")))
2550 d.addCallback(_stash_uri, "sick")
2551 d.addCallback(lambda ign:
2552 c0.upload(upload.Data(DATA+"2", convergence="")))
2553 d.addCallback(_stash_uri, "dead")
2554 def _stash_mutable_uri(n, which):
2555 self.uris[which] = n.get_uri()
2556 assert isinstance(self.uris[which], str)
2557 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2558 d.addCallback(_stash_mutable_uri, "corrupt")
2559 d.addCallback(lambda ign:
2560 c0.upload(upload.Data("literal", convergence="")))
2561 d.addCallback(_stash_uri, "small")
2563 def _compute_fileurls(ignored):
2565 for which in self.uris:
2566 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2567 d.addCallback(_compute_fileurls)
2569 def _clobber_shares(ignored):
2570 good_shares = self.find_shares(self.uris["good"])
2571 self.failUnlessEqual(len(good_shares), 10)
2572 sick_shares = self.find_shares(self.uris["sick"])
2573 os.unlink(sick_shares[0][2])
2574 dead_shares = self.find_shares(self.uris["dead"])
2575 for i in range(1, 10):
2576 os.unlink(dead_shares[i][2])
2577 c_shares = self.find_shares(self.uris["corrupt"])
2578 cso = CorruptShareOptions()
2579 cso.stdout = StringIO()
2580 cso.parseOptions([c_shares[0][2]])
2582 d.addCallback(_clobber_shares)
2584 d.addCallback(self.CHECK, "good", "t=check")
2585 def _got_html_good(res):
2586 self.failUnless("Healthy" in res, res)
2587 self.failIf("Not Healthy" in res, res)
2588 d.addCallback(_got_html_good)
2589 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2590 def _got_html_good_return_to(res):
2591 self.failUnless("Healthy" in res, res)
2592 self.failIf("Not Healthy" in res, res)
2593 self.failUnless('<a href="somewhere">Return to file'
2595 d.addCallback(_got_html_good_return_to)
2596 d.addCallback(self.CHECK, "good", "t=check&output=json")
2597 def _got_json_good(res):
2598 r = simplejson.loads(res)
2599 self.failUnlessEqual(r["summary"], "Healthy")
2600 self.failUnless(r["results"]["healthy"])
2601 self.failIf(r["results"]["needs-rebalancing"])
2602 self.failUnless(r["results"]["recoverable"])
2603 d.addCallback(_got_json_good)
2605 d.addCallback(self.CHECK, "small", "t=check")
2606 def _got_html_small(res):
2607 self.failUnless("Literal files are always healthy" in res, res)
2608 self.failIf("Not Healthy" in res, res)
2609 d.addCallback(_got_html_small)
2610 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2611 def _got_html_small_return_to(res):
2612 self.failUnless("Literal files are always healthy" in res, res)
2613 self.failIf("Not Healthy" in res, res)
2614 self.failUnless('<a href="somewhere">Return to file'
2616 d.addCallback(_got_html_small_return_to)
2617 d.addCallback(self.CHECK, "small", "t=check&output=json")
2618 def _got_json_small(res):
2619 r = simplejson.loads(res)
2620 self.failUnlessEqual(r["storage-index"], "")
2621 self.failUnless(r["results"]["healthy"])
2622 d.addCallback(_got_json_small)
2624 d.addCallback(self.CHECK, "sick", "t=check")
2625 def _got_html_sick(res):
2626 self.failUnless("Not Healthy" in res, res)
2627 d.addCallback(_got_html_sick)
2628 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2629 def _got_json_sick(res):
2630 r = simplejson.loads(res)
2631 self.failUnlessEqual(r["summary"],
2632 "Not Healthy: 9 shares (enc 3-of-10)")
2633 self.failIf(r["results"]["healthy"])
2634 self.failIf(r["results"]["needs-rebalancing"])
2635 self.failUnless(r["results"]["recoverable"])
2636 d.addCallback(_got_json_sick)
2638 d.addCallback(self.CHECK, "dead", "t=check")
2639 def _got_html_dead(res):
2640 self.failUnless("Not Healthy" in res, res)
2641 d.addCallback(_got_html_dead)
2642 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2643 def _got_json_dead(res):
2644 r = simplejson.loads(res)
2645 self.failUnlessEqual(r["summary"],
2646 "Not Healthy: 1 shares (enc 3-of-10)")
2647 self.failIf(r["results"]["healthy"])
2648 self.failIf(r["results"]["needs-rebalancing"])
2649 self.failIf(r["results"]["recoverable"])
2650 d.addCallback(_got_json_dead)
2652 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2653 def _got_html_corrupt(res):
2654 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2655 d.addCallback(_got_html_corrupt)
2656 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2657 def _got_json_corrupt(res):
2658 r = simplejson.loads(res)
2659 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2661 self.failIf(r["results"]["healthy"])
2662 self.failUnless(r["results"]["recoverable"])
2663 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2664 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2665 d.addCallback(_got_json_corrupt)
2667 d.addErrback(self.explain_web_error)
2670 def test_repair_html(self):
2671 self.basedir = "web/Grid/repair_html"
2673 c0 = self.g.clients[0]
2676 d = c0.upload(upload.Data(DATA, convergence=""))
2677 def _stash_uri(ur, which):
2678 self.uris[which] = ur.uri
2679 d.addCallback(_stash_uri, "good")
2680 d.addCallback(lambda ign:
2681 c0.upload(upload.Data(DATA+"1", convergence="")))
2682 d.addCallback(_stash_uri, "sick")
2683 d.addCallback(lambda ign:
2684 c0.upload(upload.Data(DATA+"2", convergence="")))
2685 d.addCallback(_stash_uri, "dead")
2686 def _stash_mutable_uri(n, which):
2687 self.uris[which] = n.get_uri()
2688 assert isinstance(self.uris[which], str)
2689 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2690 d.addCallback(_stash_mutable_uri, "corrupt")
2692 def _compute_fileurls(ignored):
2694 for which in self.uris:
2695 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2696 d.addCallback(_compute_fileurls)
2698 def _clobber_shares(ignored):
2699 good_shares = self.find_shares(self.uris["good"])
2700 self.failUnlessEqual(len(good_shares), 10)
2701 sick_shares = self.find_shares(self.uris["sick"])
2702 os.unlink(sick_shares[0][2])
2703 dead_shares = self.find_shares(self.uris["dead"])
2704 for i in range(1, 10):
2705 os.unlink(dead_shares[i][2])
2706 c_shares = self.find_shares(self.uris["corrupt"])
2707 cso = CorruptShareOptions()
2708 cso.stdout = StringIO()
2709 cso.parseOptions([c_shares[0][2]])
2711 d.addCallback(_clobber_shares)
2713 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2714 def _got_html_good(res):
2715 self.failUnless("Healthy" in res, res)
2716 self.failIf("Not Healthy" in res, res)
2717 self.failUnless("No repair necessary" in res, res)
2718 d.addCallback(_got_html_good)
2720 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2721 def _got_html_sick(res):
2722 self.failUnless("Healthy : healthy" in res, res)
2723 self.failIf("Not Healthy" in res, res)
2724 self.failUnless("Repair successful" in res, res)
2725 d.addCallback(_got_html_sick)
2727 # repair of a dead file will fail, of course, but it isn't yet
2728 # clear how this should be reported. Right now it shows up as
2731 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2732 #def _got_html_dead(res):
2734 # self.failUnless("Healthy : healthy" in res, res)
2735 # self.failIf("Not Healthy" in res, res)
2736 # self.failUnless("No repair necessary" in res, res)
2737 #d.addCallback(_got_html_dead)
2739 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2740 def _got_html_corrupt(res):
2741 self.failUnless("Healthy : Healthy" in res, res)
2742 self.failIf("Not Healthy" in res, res)
2743 self.failUnless("Repair successful" in res, res)
2744 d.addCallback(_got_html_corrupt)
2746 d.addErrback(self.explain_web_error)
2749 def test_repair_json(self):
2750 self.basedir = "web/Grid/repair_json"
2752 c0 = self.g.clients[0]
2755 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2756 def _stash_uri(ur, which):
2757 self.uris[which] = ur.uri
2758 d.addCallback(_stash_uri, "sick")
2760 def _compute_fileurls(ignored):
2762 for which in self.uris:
2763 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2764 d.addCallback(_compute_fileurls)
2766 def _clobber_shares(ignored):
2767 sick_shares = self.find_shares(self.uris["sick"])
2768 os.unlink(sick_shares[0][2])
2769 d.addCallback(_clobber_shares)
2771 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2772 def _got_json_sick(res):
2773 r = simplejson.loads(res)
2774 self.failUnlessEqual(r["repair-attempted"], True)
2775 self.failUnlessEqual(r["repair-successful"], True)
2776 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2777 "Not Healthy: 9 shares (enc 3-of-10)")
2778 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2779 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2780 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2781 d.addCallback(_got_json_sick)
2783 d.addErrback(self.explain_web_error)
def test_unknown(self):
    # A directory entry whose cap uses an unrecognized ("from the
    # future") URI scheme must still be tolerated by the directory
    # listing, the t=json rendering, and the t=info page, in both the
    # read-write and the read-only views of the directory.
    # NOTE(review): this listing omits several lines of the original
    # file (grid setup, the 'self.rootnode = n' stash, the 'safe='
    # continuation of the quote() call, the expect_writecap /
    # expect_readcap if/else headers, and the trailing 'return d') —
    # confirm against the full source.
    self.basedir = "web/Grid/unknown"
    c0 = self.g.clients[0]
    future_writecap = "x-tahoe-crazy://I_am_from_the_future."
    future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
    # the future cap format may contain slashes, which must be tolerated
    expected_info_url = "uri/%s?t=info" % urllib.quote(future_writecap,
    future_node = UnknownNode(future_writecap, future_readcap)
    d = c0.create_empty_dirnode()
    def _stash_root_and_create_file(n):
        # NOTE(review): the line stashing n (presumably as
        # self.rootnode) is omitted here; set_node below reads
        # self.rootnode.
        self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
        return self.rootnode.set_node(u"future", future_node)
    d.addCallback(_stash_root_and_create_file)
    # make sure directory listing tolerates unknown nodes
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_html(res):
        self.failUnlessIn("<td>future</td>", res)
        # find the More Info link for "future", should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnlessEqual(info_url, "future?t=info")
    d.addCallback(_check_html)
    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_json(res, expect_writecap):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        f = data[1]["children"]["future"]
        self.failUnlessEqual(f[0], "unknown")
        # NOTE(review): an 'if expect_writecap: ... else: ...' branch
        # around the next two assertions is omitted from this listing.
        self.failUnlessEqual(f[1]["rw_uri"], future_writecap)
        self.failIfIn("rw_uri", f[1])
        self.failUnlessEqual(f[1]["ro_uri"], future_readcap)
        self.failUnless("metadata" in f[1])
    d.addCallback(_check_json, expect_writecap=True)
    d.addCallback(lambda ign: self.GET(expected_info_url))
    def _check_info(res, expect_readcap):
        self.failUnlessIn("Object Type: <span>unknown</span>", res)
        self.failUnlessIn(future_writecap, res)
        # NOTE(review): a guard keyed on expect_readcap around the next
        # assertion is omitted from this listing.
        self.failUnlessIn(future_readcap, res)
        # none of the type-specific UI sections apply to unknown nodes
        self.failIfIn("Raw data as", res)
        self.failIfIn("Directory writecap", res)
        self.failIfIn("Checker Operations", res)
        self.failIfIn("Mutable File Operations", res)
        self.failIfIn("Directory Operations", res)
    d.addCallback(_check_info, expect_readcap=False)
    d.addCallback(lambda ign: self.GET(self.rooturl+"future?t=info"))
    d.addCallback(_check_info, expect_readcap=True)
    # and make sure that a read-only version of the directory can be
    # rendered too. This version will not have future_writecap
    d.addCallback(lambda ign: self.GET(self.rourl))
    d.addCallback(_check_html)
    d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
    d.addCallback(_check_json, expect_writecap=False)
def test_deep_check(self):
    # Build a small tree (a healthy CHK file, a literal file, an
    # unhealthy "sick" file, and an unknown from-the-future node), then
    # exercise the streaming t=stream-deep-check and t=stream-manifest
    # webapi operations, including their ERROR: behavior once a
    # subdirectory has been made unrecoverable.
    # NOTE(review): this listing omits a number of lines of the
    # original file (grid setup, 'def _done(res):' and other callback
    # headers, try/except around the JSON parse, listcomp continuation
    # lines such as 'if line]', 'convergence=""' continuations, the
    # bindings of u0/stats/s/first/error_lines, a 'return fn' in
    # _stash_uri, and the trailing 'return d') — confirm against the
    # full source.
    self.basedir = "web/Grid/deep_check"
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    def _stash_root_and_create_file(n):
        # NOTE(review): the 'self.rootnode = n' line appears to be
        # omitted here; later callbacks read self.rootnode.
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        # remember each child's URI under a symbolic name
        # NOTE(review): a 'return fn' line appears to be omitted here —
        # a later callback chains on the returned node (subdir_node).
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    # NOTE(review): the closing 'convergence=""))' continuations of the
    # next two add_file calls are omitted from this listing.
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")
    # this tests that deep-check and stream-manifest will ignore
    # UnknownNode instances. Hopefully this will also cover deep-stats.
    future_writecap = "x-tahoe-crazy://I_am_from_the_future."
    future_readcap = "x-tahoe-crazy-readonly://I_am_from_the_future."
    future_node = UnknownNode(future_writecap, future_readcap)
    d.addCallback(lambda ign: self.rootnode.set_node(u"future",future_node))
    def _clobber_shares(ignored):
        # drop two shares of "sick" so it is unhealthy but recoverable
        self.delete_shares_numbered(self.uris["sick"], [0,1])
    d.addCallback(_clobber_shares)
    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    # NOTE(review): the 'def _done(res):' header and its try/except
    # wrapper are omitted from this listing; the following lines are
    # its body.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        print "response is:", res
        print "undecodeable line was '%s'" % line
        # root, good, small, sick, future, plus the trailing stats unit
        self.failUnlessEqual(len(units), 5+1)
        # should be parent-first
        self.failUnlessEqual(u0["path"], [])
        self.failUnlessEqual(u0["type"], "directory")
        self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
        u0cr = u0["check-results"]
        self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
        ugood = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"good"]][0]
        self.failUnlessEqual(ugood["cap"], self.uris["good"])
        ugoodcr = ugood["check-results"]
        self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
        # NOTE(review): the bindings of 'stats' and 's' are omitted
        # from this listing.
        self.failUnlessEqual(stats["type"], "stats")
        self.failUnlessEqual(s["count-immutable-files"], 2)
        self.failUnlessEqual(s["count-literal-files"], 1)
        self.failUnlessEqual(s["count-directories"], 1)
        self.failUnlessEqual(s["count-unknown"], 1)
    d.addCallback(_done)
    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_manifest(res):
        # stream-manifest emits newline-terminated JSON lines,
        # parent-first, ending with a "stats" unit
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessEqual(len(units), 5+1)
        self.failUnlessEqual(units[-1]["type"], "stats")
        # NOTE(review): the binding of 'first' (presumably units[0]) is
        # omitted from this listing.
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(first["cap"], self.rootnode.get_uri())
        self.failUnlessEqual(first["type"], "directory")
        stats = units[-1]["stats"]
        self.failUnlessEqual(stats["count-immutable-files"], 2)
        self.failUnlessEqual(stats["count-literal-files"], 1)
        self.failUnlessEqual(stats["count-mutable-files"], 0)
        # NOTE(review): duplicate of the count-immutable-files
        # assertion three lines up; possibly meant to check
        # count-directories — confirm intent.
        self.failUnlessEqual(stats["count-immutable-files"], 2)
        self.failUnlessEqual(stats["count-unknown"], 1)
    d.addCallback(_check_manifest)
    # now add root/subdir and root/subdir/grandchild, then make subdir
    # unrecoverable, then see what happens
    d.addCallback(lambda ign:
                  self.rootnode.create_empty_directory(u"subdir"))
    d.addCallback(_stash_uri, "subdir")
    d.addCallback(lambda subdir_node:
                  subdir_node.add_file(u"grandchild",
                                       upload.Data(DATA+"2",
    d.addCallback(_stash_uri, "grandchild")
    # NOTE(review): the share-number continuation of the next
    # delete_shares_numbered call is omitted from this listing.
    d.addCallback(lambda ign:
                  self.delete_shares_numbered(self.uris["subdir"],
    # root/subdir [unrecoverable]
    # root/subdir/grandchild
    # how should a streaming-JSON API indicate fatal error?
    # answer: emit ERROR: instead of a JSON string
    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_broken_manifest(res):
        # everything before the first ERROR: line must be well-formed
        # JSON; a traceback follows it
        lines = res.splitlines()
        # NOTE(review): the 'error_lines = [i' listcomp header and the
        # 'if not error_lines:' guard are omitted from this listing.
                       for (i,line) in enumerate(lines)
                       if line.startswith("ERROR:")]
            self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        # NOTE(review): the second argument (presumably error_line) of
        # the next call is omitted from this listing.
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
    d.addCallback(_check_broken_manifest)
    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    def _check_broken_deepcheck(res):
        # same shape as _check_broken_manifest, plus assertions on the
        # unrecoverable subdir's own check-results
        lines = res.splitlines()
        # NOTE(review): same listcomp header / guard omissions as in
        # _check_broken_manifest above.
                       for (i,line) in enumerate(lines)
                       if line.startswith("ERROR:")]
            self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
        r = last_unit["check-results"]["results"]
        self.failUnlessEqual(r["count-recoverable-versions"], 0)
        self.failUnlessEqual(r["count-shares-good"], 1)
        self.failUnlessEqual(r["recoverable"], False)
    d.addCallback(_check_broken_deepcheck)
    d.addErrback(self.explain_web_error)
def test_deep_check_and_repair(self):
    # Exercise t=stream-deep-check&repair=true over a tree with one
    # healthy file, one literal file, and one "sick" file (9/10
    # shares): the sick file must be repaired, the others left alone.
    # The "dead" and "corrupt" cases are present but commented out.
    # NOTE(review): this listing omits several lines of the original
    # file (grid setup, the 'self.rootnode = n' stash, 'def _done(res):'
    # header, listcomp/convergence continuations, the bindings of
    # u0/stats/s, and the trailing 'return d') — confirm against the
    # full source.
    self.basedir = "web/Grid/deep_check_and_repair"
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    def _stash_root_and_create_file(n):
        # NOTE(review): the 'self.rootnode = n' line appears to be
        # omitted here.
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")
    # the unrecoverable-file and corrupt-mutable variants are disabled
    # in the original; kept as-is
    #d.addCallback(lambda ign:
    #              self.rootnode.add_file(u"dead",
    #                                     upload.Data(DATA+"2",
    #d.addCallback(_stash_uri, "dead")
    #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
    #d.addCallback(_stash_uri, "corrupt")
    def _clobber_shares(ignored):
        # sanity-check the healthy file, then delete one share of
        # "sick" so the deep-check finds something to repair
        good_shares = self.find_shares(self.uris["good"])
        self.failUnlessEqual(len(good_shares), 10)
        sick_shares = self.find_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        #dead_shares = self.find_shares(self.uris["dead"])
        #for i in range(1, 10):
        #    os.unlink(dead_shares[i][2])
        #c_shares = self.find_shares(self.uris["corrupt"])
        #cso = CorruptShareOptions()
        #cso.stdout = StringIO()
        #cso.parseOptions([c_shares[0][2]])
    d.addCallback(_clobber_shares)
    # root/good CHK, 10 shares
    # root/sick CHK, 9 shares
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
    # NOTE(review): the 'def _done(res):' header is omitted from this
    # listing; the following lines are its body.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        self.failUnlessEqual(len(units), 4+1)
        # should be parent-first
        # NOTE(review): the binding of u0 (presumably units[0]) is
        # omitted from this listing.
        self.failUnlessEqual(u0["path"], [])
        self.failUnlessEqual(u0["type"], "directory")
        self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
        u0crr = u0["check-and-repair-results"]
        self.failUnlessEqual(u0crr["repair-attempted"], False)
        self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
        ugood = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"good"]][0]
        self.failUnlessEqual(ugood["cap"], self.uris["good"])
        ugoodcrr = ugood["check-and-repair-results"]
        # NOTE(review): the next two assertions re-check u0crr, which
        # was already verified above, and 'ugoodcrr' (bound on the
        # previous line) is never used — almost certainly a copy/paste
        # defect; they were probably meant to check ugoodcrr. Confirm
        # and fix upstream.
        self.failUnlessEqual(u0crr["repair-attempted"], False)
        self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
        usick = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"sick"]][0]
        self.failUnlessEqual(usick["cap"], self.uris["sick"])
        usickcrr = usick["check-and-repair-results"]
        # the sick file must have been repaired back to 10 shares
        self.failUnlessEqual(usickcrr["repair-attempted"], True)
        self.failUnlessEqual(usickcrr["repair-successful"], True)
        self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
        self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
        # NOTE(review): the bindings of 'stats' and 's' are omitted
        # from this listing.
        self.failUnlessEqual(stats["type"], "stats")
        self.failUnlessEqual(s["count-immutable-files"], 2)
        self.failUnlessEqual(s["count-literal-files"], 1)
        self.failUnlessEqual(s["count-directories"], 1)
    d.addCallback(_done)
    d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    # Collect (sharefile-path, lease-count) pairs for every share of
    # the stashed URI named 'which'. The 'ignored' parameter lets this
    # sit directly in a Deferred callback chain; the result is consumed
    # by _assert_leasecount.
    # NOTE(review): this listing omits the initialization of
    # lease_counts and the final 'return lease_counts' — confirm
    # against the full source.
    u = self.uris[which]
    shares = self.find_shares(u)
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
3135 def _assert_leasecount(self, lease_counts, expected):
3136 for (fn, num_leases) in lease_counts:
3137 if num_leases != expected:
3138 self.fail("expected %d leases, have %d, on %s" %
3139 (expected, num_leases, fn))
def test_add_lease(self):
    # Verify t=check&add-lease=true semantics: a check from the
    # original client merely renews its existing lease (same lease
    # secrets), while a check from a second client (clientnum=1) adds a
    # second, distinct lease. Covers two immutable files and one
    # mutable file.
    # NOTE(review): this listing omits several lines of the original
    # file (self.uris/self.fileurls initialization, the DATA constant,
    # the clientnum=1 continuation of the final mutable CHECK, and the
    # trailing 'return d') — confirm against the full source.
    self.basedir = "web/Grid/add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "two")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
    d.addCallback(_stash_mutable_uri, "mutable")
    def _compute_fileurls(ignored):
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)
    # baseline: exactly one lease on every share of every file
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self.CHECK, "one", "t=check") # no add-lease
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)
    # a plain check must not change any lease counts
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    # this CHECK uses the original client, which uses the same
    # lease-secrets, so it will just renew the original lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
    d.addCallback(_got_html_good)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    # this CHECK uses an alternate client, which adds a second lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
    d.addCallback(_got_html_good)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    # same story for the mutable file: original client only renews
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
    d.addCallback(_got_html_good)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    # NOTE(review): the continuation line of this CHECK call
    # (presumably clientnum=1) is omitted from this listing.
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
    d.addCallback(_got_html_good)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)
    d.addErrback(self.explain_web_error)
def test_deep_add_lease(self):
    # Verify t=stream-deep-check&add-lease=true over a directory tree:
    # from the original client it only renews the existing leases,
    # while from a second client (clientnum=1) it adds one lease to
    # every node in the tree.
    # NOTE(review): this listing omits several lines of the original
    # file (self.uris/self.fileurls setup, the DATA constant, the
    # 'self.rootnode = n' stash, the 'def _done(res):' header and part
    # of its list comprehension, convergence/clientnum continuation
    # lines, and the trailing 'return d') — confirm against the full
    # source.
    self.basedir = "web/Grid/deep_add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    def _stash_root_and_create_file(n):
        # NOTE(review): the 'self.rootnode = n' line appears to be
        # omitted here; later callbacks read self.rootnode.
        self.uris["root"] = n.get_uri()
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"one", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
    d.addCallback(_stash_uri, "mutable")
    d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
    # NOTE(review): the 'def _done(res):' header and the listcomp's
    # closing line are omitted from this listing.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        # root, one, small, mutable, stats
        self.failUnlessEqual(len(units), 4+1)
    d.addCallback(_done)
    # baseline: one lease everywhere
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    # original client: add-lease only renews, counts stay at 1
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
    d.addCallback(_done)
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)
    # alternate client: every node gains a second lease
    # NOTE(review): the continuation line of this CHECK call
    # (presumably clientnum=1) is omitted from this listing.
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
    d.addCallback(_done)
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)
    d.addErrback(self.explain_web_error)
def test_exceptions(self):
    # Exercise webapi error reporting: NoSharesError (410),
    # NotEnoughSharesError (410), missing children (404),
    # unrecoverable directories rendered as HTML and as JSON, and an
    # arbitrary internal error (ErrorBoom) rendered as an HTML or
    # plain-text traceback depending on the Accept header.
    # NOTE(review): this listing omits several lines of the original
    # file (fileurls/DATA setup, the 'def _stash_root(n):' and
    # 'def _stash_bad(ur):' headers and their 'return' lines,
    # 'u = n.get_uri()' bindings in the dirnode manglers, 'self.GET,'
    # and 'body)' continuation lines, and the trailing 'return d') —
    # confirm against the full source.
    self.basedir = "web/Grid/exceptions"
    self.set_up_grid(num_clients=1, num_servers=2)
    c0 = self.g.clients[0]
    d = c0.create_empty_dirnode()
    # NOTE(review): the 'def _stash_root(n):' header is omitted from
    # this listing; the next two lines are its body.
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
    d.addCallback(_stash_root)
    d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
    # NOTE(review): the 'def _stash_bad(ur):' header is omitted from
    # this listing; the following lines are its body.
        # keep only share #0 of this file (below recoverability),
        # then flip a bit in the key to fabricate a URI with no shares
        self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
        self.delete_shares_numbered(ur.uri, range(1,10))
        u = uri.from_string(ur.uri)
        u.key = testutil.flip_bit(u.key, 0)
        baduri = u.to_string()
        self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
    d.addCallback(_stash_bad)
    d.addCallback(lambda ign: c0.create_empty_dirnode())
    def _mangle_dirnode_1share(n):
        # NOTE(review): the binding of u (presumably n.get_uri()) is
        # omitted from this listing.
        url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-1share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(1,10))
    d.addCallback(_mangle_dirnode_1share)
    d.addCallback(lambda ign: c0.create_empty_dirnode())
    def _mangle_dirnode_0share(n):
        # NOTE(review): the binding of u is omitted from this listing.
        url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-0share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(0,10))
    d.addCallback(_mangle_dirnode_0share)
    # NotEnoughSharesError should be reported sensibly, with a
    # text/plain explanation of the problem, and perhaps some
    # information on which shares *could* be found.
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET unrecoverable",
                                       410, "Gone", "NoSharesError",
                                       self.GET, self.fileurls["0shares"]))
    def _check_zero_shares(body):
        self.failIf("<html>" in body, body)
        # normalize whitespace before comparing the prose
        body = " ".join(body.strip().split())
        exp = ("NoSharesError: no shares could be found. "
               "Zero shares usually indicates a corrupt URI, or that "
               "no servers were connected, but it might also indicate "
               "severe corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is: "
               "Failed to get enough shareholders: have 0, need 3")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_zero_shares)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET 1share",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["1share"]))
    def _check_one_share(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NotEnoughSharesError: This indicates that some "
               "servers were unavailable, or that shares have been "
               "lost to server departure, hard drive failure, or disk "
               "corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is:"
               " Failed to get enough shareholders: have 1, need 3")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_one_share)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    def _missing_child(body):
        self.failUnless("No such child: imaginary" in body, body)
    d.addCallback(_missing_child)
    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
    def _check_0shares_dir_html(body):
        self.failUnless("<html>" in body, body)
        # we should see the regular page, but without the child table or
        body = " ".join(body.strip().split())
        # NOTE(review): the closing 'body)' continuation of the next
        # call is omitted from this listing.
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_0shares_dir_html)
    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
    def _check_1shares_dir_html(body):
        # at some point, we'll split UnrecoverableFileError into 0-shares
        # and some-shares like we did for immutable files (since there
        # are different sorts of advice to offer in each case). For now,
        # they present the same way.
        self.failUnless("<html>" in body, body)
        body = " ".join(body.strip().split())
        # NOTE(review): same 'body)' continuation omission as in
        # _check_0shares_dir_html.
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_1shares_dir_html)
    # NOTE(review): the 'self.GET,' argument line of this call is
    # omitted from this listing.
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-0share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-0share-json"]))
    def _check_unrecoverable_file(body):
        # JSON requests get the plain-text explanation, not HTML
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessEqual(exp, body)
    d.addCallback(_check_unrecoverable_file)
    # NOTE(review): same 'self.GET,' argument-line omission here.
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-1share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-1share-json"]))
    d.addCallback(_check_unrecoverable_file)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    # attach a webapi child that throws a random error, to test how it
    w = c0.getServiceNamed("webish")
    w.root.putChild("ERRORBOOM", ErrorBoom())
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_html",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM"))
    def _internal_error_html(body):
        # test that a weird exception during a webapi operation with
        # Accept:*/* results in a text/html stack trace, while one
        # without that Accept: line gets us a text/plain stack trace
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["text/plain"]}))
    def _internal_error_text(body):
        # test that a weird exception during a webapi operation with
        # Accept:*/* results in a text/html stack trace, while one
        # without that Accept: line gets us a text/plain stack trace
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text)
    def _flush_errors(res):
        # Trial: please ignore the CompletelyUnhandledError in the logs
        # NOTE(review): a 'return res' line appears to be omitted here.
        self.flushLoggedErrors(CompletelyUnhandledError)
    d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Marker exception deliberately raised by ErrorBoom so
    test_exceptions can exercise the webapi's internal-error (HTTP 500)
    rendering; test_exceptions flushes it from the trial logs."""
class ErrorBoom(rend.Page):
    """A page resource whose rendering always blows up; attached at
    /ERRORBOOM by test_exceptions to probe 500-error handling.

    NOTE(review): 'rend' is not imported in the visible portion of this
    file — presumably 'from nevow import rend'; confirm.
    """
    def beforeRender(self, ctx):
        # raised before any rendering happens, so every GET of this
        # resource produces an internal server error
        raise CompletelyUnhandledError("whoops")