2 import os.path, re, urllib
4 from StringIO import StringIO
5 from twisted.application import service
6 from twisted.trial import unittest
7 from twisted.internet import defer, reactor
8 from twisted.internet.task import Clock
9 from twisted.web import client, error, http
10 from twisted.python import failure, log
11 from nevow import rend
12 from allmydata import interfaces, uri, webish, dirnode
13 from allmydata.storage.shares import get_share_file
14 from allmydata.storage_client import StorageFarmBroker
15 from allmydata.immutable import upload
16 from allmydata.immutable.downloader.status import DownloadStatus
17 from allmydata.dirnode import DirectoryNode
18 from allmydata.nodemaker import NodeMaker
19 from allmydata.unknown import UnknownNode
20 from allmydata.web import status, common
21 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
22 from allmydata.util import fileutil, base32
23 from allmydata.util.consumer import download_to_data
24 from allmydata.util.netstring import split_netstring
25 from allmydata.util.encodingutil import to_str
26 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
27 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
28 from allmydata.interfaces import IMutableFileNode
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

# Generous per-test timeout for the trial runner.
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.

# Caps using URI schemes "from the future" -- used to exercise the webapi's
# handling of unknown/unrecognized cap formats.  Encoded to UTF-8 bytestrings.
unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
class FakeStatsProvider:
    # Minimal stand-in for the node's stats provider: a static, empty
    # stats/counters mapping.
    # NOTE(review): line(s) of this class (e.g. a get_stats() accessor) are
    # missing from this excerpt -- confirm against the full file.
    stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker that produces in-memory fake filenodes instead of real ones."""

    def _create_lit(self, cap):
        # literal caps are served by the same fake immutable node type
        return FakeCHKFileNode(cap)

    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)

    def _create_mutable(self, cap):
        fake = FakeMutableFileNode(None, None, None, None)
        return fake.init_from_cap(cap)

    def create_mutable_file(self, contents="", keysize=None):
        # keysize is accepted for interface compatibility but ignored here
        return FakeMutableFileNode(None, None, None, None).create(contents)
class FakeUploader(service.Service):
    # Stand-in uploader service: stores the uploaded data in a fake CHK
    # filenode and reports its URI via UploadResults.
    # NOTE(review): line(s) missing from this excerpt (the service name,
    # the "def _got_data(...):" header joining the read data, and the final
    # return statements) -- confirm against the full file.
    def upload(self, uploadable, history=None):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
            # presumably inside _got_data, where 'data' is the joined
            # uploadable contents -- TODO confirm
            n = create_chk_filenode(data)
            results = upload.UploadResults()
            results.uri = n.get_uri()
        d.addCallback(_got_data)
    def get_helper_info(self):
        # NOTE(review): body not visible in this excerpt.
# NOTE(review): the enclosing "class FakeHistory" header is not visible in
# this excerpt; the following are its canned status lists and accessors.
# Each list holds exactly one status object so the /status pages have
# something to render.
_all_upload_status = [upload.UploadStatus()]
_all_download_status = [DownloadStatus("storage_index", 1234)]
_all_mapupdate_statuses = [servermap.UpdateStatus()]
_all_publish_statuses = [publish.PublishStatus()]
_all_retrieve_statuses = [retrieve.RetrieveStatus()]

# Accessors mirror the real History interface, returning the lists above.
def list_all_upload_statuses(self):
    return self._all_upload_status
def list_all_download_statuses(self):
    return self._all_download_status
def list_all_mapupdate_statuses(self):
    return self._all_mapupdate_statuses
def list_all_publish_statuses(self):
    return self._all_publish_statuses
def list_all_retrieve_statuses(self):
    return self._all_retrieve_statuses
def list_all_helper_statuses(self):
    # NOTE(review): body not visible in this excerpt (presumably returns []).
class FakeClient(Client):
    """Client with every networked service replaced by an in-memory fake,
    so the web frontend can be exercised in isolation."""
    # NOTE(review): the "def __init__(self):" line and a few statements are
    # missing from this excerpt; the following lines are the __init__ body.
    # don't upcall to Client.__init__, since we only want to initialize a
        service.MultiService.__init__(self)
        self.nodeid = "fake_nodeid"
        self.nickname = "fake_nickname"
        self.introducer_furl = "None"
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = SecretHolder("lease secret", "convergence secret")
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(None, permute_peers=True)
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.setServiceParent(self)
        # NOTE(review): continuation of this call is missing from this excerpt.
        self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,

    # start/stop only the MultiService machinery, skipping Client's own
    # (networked) startup
    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
# Convenience alias: the SDMF single-segment size limit used by several tests.
MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
class WebMixin(object):
    """Mixin that starts a FakeClient-backed webish server and populates a
    small directory tree (public/foo/..., public/reedownlee/...) for the
    web-API tests.

    NOTE(review): several 'def' headers and statements (setUp, callback
    headers, continuation lines) are missing from this excerpt; the
    indentation below is a best-effort reconstruction.
    """

    # -- body of setUp (its "def setUp(self):" line is not visible) --
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        # NOTE(review): continuation of this call is missing from this excerpt.
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
        self.ws.setServiceParent(self.s)
        self.webish_port = port = self.ws.listener._port.getHost().port
        self.webish_url = "http://localhost:%d" % port

        # create six dirnodes; 'res' below is the DeferredList result list
        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        # -- DeferredList callback body (its def line is not visible) --
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]
            # NOTE(review): the binding of 'foo' (presumably res[2][1]) is
            # missing from this excerpt.
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri(),
                                     foo.get_readonly_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()

            foo.set_uri(u"empty", res[3][1].get_uri(),
                        res[3][1].get_readonly_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri, sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)

            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)

            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
            foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)

            _ign, n, baz_file = self.makefile(2)
            self._baz_file_uri = baz_file
            sub.set_uri(u"baz.txt", baz_file, baz_file)

            _ign, n, self._bad_file_uri = self.makefile(3)
            # this uri should not be downloadable
            del FakeCHKFileNode.all_contents[self._bad_file_uri]

            # NOTE(review): the creation of 'rodir' (presumably res[5][1]) is
            # missing from this excerpt.
            self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                                     rodir.get_readonly_uri())
            rodir.set_uri(u"nor", baz_file, baz_file)

            # public/foo/blockingfile
            # public/foo/sub/baz.txt
            # public/reedownlee/nor
            self.NEWFILE_CONTENTS = "newfile contents\n"

            return foo.get_metadata_for(u"bar.txt")
        def _got_metadata(metadata):
            # remember bar.txt's link metadata so tests can compare timestamps
            self._bar_txt_metadata = metadata
        d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a fake immutable file whose contents mention *number*.

    Returns a (contents, node, uri) tuple."""
    body = "contents of file %s\n" % number
    node = create_chk_filenode(body)
    return body, node, node.get_uri()
# -- body of tearDown (its "def tearDown(self):" line is not visible in
# this excerpt): stop the fake client's services --
    return self.s.stopService()
def failUnlessIsBarDotTxt(self, res):
    # the response body must be exactly the contents of public/foo/bar.txt
    self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
def failUnlessIsBarJSON(self, res):
    """Assert that *res* is the t=json rendering of the immutable bar.txt."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], "filenode")
    info = parsed[1]
    self.failUnless(isinstance(info, dict))
    self.failIf(info["mutable"])
    self.failIf("rw_uri" in info) # immutable
    self.failUnlessReallyEqual(to_str(info["ro_uri"]), self._bar_txt_uri)
    self.failUnlessReallyEqual(to_str(info["verify_uri"]), self._bar_txt_verifycap)
    self.failUnlessReallyEqual(info["size"], len(self.BAR_CONTENTS))
def failUnlessIsFooJSON(self, res):
    """Assert that *res* is the t=json rendering of the mutable foo dirnode."""
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    self.failUnlessEqual(data[0], "dirnode", res)
    self.failUnless(isinstance(data[1], dict))
    self.failUnless(data[1]["mutable"])
    self.failUnless("rw_uri" in data[1]) # mutable
    self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
    self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
    self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)

    kidnames = sorted([unicode(n) for n in data[1]["children"]])
    self.failUnlessEqual(kidnames,
                         [u"bar.txt", u"blockingfile", u"empty",
                          u"n\u00fc.txt", u"sub"])
    # NOTE(review): part of this dict construction (the "for (name,value)"
    # clause) is missing from this excerpt.
    kids = dict( [(unicode(name),value)
                  in data[1]["children"].iteritems()] )
    self.failUnlessEqual(kids[u"sub"][0], "dirnode")
    self.failUnlessIn("metadata", kids[u"sub"][1])
    self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
    tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
    self.failUnlessIn("linkcrtime", tahoe_md)
    self.failUnlessIn("linkmotime", tahoe_md)
    self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
    self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
    self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
    self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
                               self._bar_txt_verifycap)
    self.failUnlessIn("metadata", kids[u"bar.txt"][1])
    self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
    self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
                               self._bar_txt_metadata["tahoe"]["linkcrtime"])
    # NOTE(review): the expected-value continuation of this call is missing
    # from this excerpt.
    self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
def GET(self, urlpath, followRedirect=False, return_response=False,
        # NOTE(review): the **kwargs continuation of this signature is
        # missing from this excerpt.
    # if return_response=True, this fires with (data, statuscode,
    # respheaders) instead of just data.
    assert not isinstance(urlpath, unicode)
    url = self.webish_url + urlpath
    factory = HTTPClientGETFactory(url, method="GET",
                                   followRedirect=followRedirect, **kwargs)
    reactor.connectTCP("localhost", self.webish_port, factory)
    # -- _got_data callback body (its def line and the return_response
    # guard are missing from this excerpt) --
        return (data, factory.status, factory.response_headers)
    d.addCallback(_got_data)
    return factory.deferred
def HEAD(self, urlpath, return_response=False, **kwargs):
    # this requires some surgery, because twisted.web.client doesn't want
    # to give us back the response headers.
    factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
    reactor.connectTCP("localhost", self.webish_port, factory)
    # -- _got_data callback body (its def line and the return_response
    # guard are missing from this excerpt) --
        return (data, factory.status, factory.response_headers)
    d.addCallback(_got_data)
    return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """Issue a PUT of *data* to *urlpath* on the test webserver."""
    return client.getPage(self.webish_url + urlpath, method="PUT",
                          postdata=data, **kwargs)
def DELETE(self, urlpath):
    """Issue a DELETE to *urlpath* on the test webserver."""
    return client.getPage(self.webish_url + urlpath, method="DELETE")
def POST(self, urlpath, followRedirect=False, **fields):
    """POST the given keyword fields as a multipart/form-data body.

    A tuple value is treated as (filename, contents) for a file upload
    field; unicode values are UTF-8 encoded."""
    sepbase = "boogabooga"
    # NOTE(review): initialization of 'form'/'headers' and some boundary
    # lines are missing from this excerpt.
    form.append('Content-Disposition: form-data; name="_charset"')
    for name, value in fields.iteritems():
        if isinstance(value, tuple):
            filename, value = value
            form.append('Content-Disposition: form-data; name="%s"; '
                        'filename="%s"' % (name, filename.encode("utf-8")))
        # NOTE(review): the 'else:' line is missing from this excerpt.
            form.append('Content-Disposition: form-data; name="%s"' % name)
        if isinstance(value, unicode):
            value = value.encode("utf-8")
        # NOTE(review): line(s) missing from this excerpt.
        assert isinstance(value, str)
        # NOTE(review): remaining form-assembly lines are missing from
        # this excerpt.
    body = "\r\n".join(form) + "\r\n"
    headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
    return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """POST a raw *body* to *urlpath* on the test webserver.

    headers: optional dict of request headers.  Uses a None sentinel
    instead of a shared mutable default dict, so one call's headers can
    never leak into another's.
    """
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
def shouldFail(self, res, expected_failure, which,
               substring=None, response_substring=None):
    """addBoth-style check: assert *res* is the expected Failure,
    optionally checking the message and HTTP response body substrings."""
    if isinstance(res, failure.Failure):
        res.trap(expected_failure)
        # NOTE(review): the 'if substring:' guard line is missing from
        # this excerpt.
            self.failUnless(substring in str(res),
                            "substring '%s' not in '%s'"
                            % (substring, str(res)))
        if response_substring:
            self.failUnless(response_substring in res.value.response,
                            "response substring '%s' not in '%s'"
                            % (response_substring, res.value.response))
    # NOTE(review): the 'else:' line is missing from this excerpt.
        self.fail("%s was supposed to raise %s, not get '%s'" %
                  (which, expected_failure, res))
def shouldFail2(self, expected_failure, which, substring,
                # NOTE(review): a 'response_substring' parameter line is
                # missing from this excerpt.
                callable, *args, **kwargs):
    """Invoke callable(*args, **kwargs) and assert it fails as expected."""
    assert substring is None or isinstance(substring, str)
    assert response_substring is None or isinstance(response_substring, str)
    d = defer.maybeDeferred(callable, *args, **kwargs)
    # -- 'done' callback body (its def line is missing from this excerpt) --
        if isinstance(res, failure.Failure):
            res.trap(expected_failure)
            # NOTE(review): the 'if substring:' guard line is missing.
                self.failUnless(substring in str(res),
                                "%s: substring '%s' not in '%s'"
                                % (which, substring, str(res)))
            if response_substring:
                self.failUnless(response_substring in res.value.response,
                                "%s: response substring '%s' not in '%s'"
                                # NOTE(review): the '% (which,' line is missing.
                                response_substring, res.value.response))
        # NOTE(review): the 'else:' line is missing from this excerpt.
            self.fail("%s was supposed to raise %s, not get '%s'" %
                      (which, expected_failure, res))
    # NOTE(review): the d.addBoth(done) / return d lines are missing.
def should404(self, res, which):
    """Pass if *res* is an HTTP 404 failure; otherwise fail the test."""
    if isinstance(res, failure.Failure):
        res.trap(error.Error)
        self.failUnlessReallyEqual(res.value.status, "404")
    # NOTE(review): the 'else:' line and fail() continuation are missing
    # from this excerpt.
        self.fail("%s was supposed to Error(404), not get '%s'" %

def should302(self, res, which):
    """Pass if *res* is an HTTP 302 failure; otherwise fail the test."""
    if isinstance(res, failure.Failure):
        res.trap(error.Error)
        self.failUnlessReallyEqual(res.value.status, "302")
    # NOTE(review): the 'else:' line and fail() continuation are missing
    # from this excerpt.
        self.fail("%s was supposed to Error(302), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    # The web-API test cases proper.
    # NOTE(review): many statements and callback 'def' headers in the
    # following methods are missing from this excerpt; indentation is a
    # best-effort reconstruction.
    def test_create(self):
        # NOTE(review): body not visible in this excerpt.
    def test_welcome(self):
        # NOTE(review): the GET("/") call and the _check def header are
        # missing from this excerpt.
            self.failUnless('Welcome To Tahoe-LAFS' in res, res)
            self.s.basedir = 'web/test_welcome'
            fileutil.make_dirs("web/test_welcome")
            fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
def test_provisioning(self):
    """Exercise the /provisioning page with several parameter sets."""
    d = self.GET("/provisioning/")
    # -- _check body (its def header is missing from this excerpt) --
        self.failUnless('Provisioning Tool' in res)
        # NOTE(review): some dict entries are missing from this excerpt.
        fields = {'filled': True,
                  "num_users": int(50e3),
                  "files_per_user": 1000,
                  "space_per_user": int(1e9),
                  "sharing_ratio": 1.0,
                  "encoding_parameters": "3-of-10-5",
                  "ownership_mode": "A",
                  "download_rate": 100,
        return self.POST("/provisioning/", **fields)
    d.addCallback(_check)
    # -- _check2 body (its def header is missing from this excerpt) --
        self.failUnless('Provisioning Tool' in res)
        self.failUnless("Share space consumed: 167.01TB" in res)
        # NOTE(review): some dict entries are missing from this excerpt.
        fields = {'filled': True,
                  "num_users": int(50e6),
                  "files_per_user": 1000,
                  "space_per_user": int(5e9),
                  "sharing_ratio": 1.0,
                  "encoding_parameters": "25-of-100-50",
                  "num_servers": 30000,
                  "ownership_mode": "E",
                  "drive_failure_model": "U",
                  "download_rate": 1000,
        return self.POST("/provisioning/", **fields)
    d.addCallback(_check2)
    # -- _check3 body (its def header is missing from this excerpt) --
        self.failUnless("Share space consumed: huge!" in res)
        fields = {'filled': True}
        return self.POST("/provisioning/", **fields)
    d.addCallback(_check3)
    # -- _check4 body (its def header is missing from this excerpt) --
        self.failUnless("Share space consumed:" in res)
    d.addCallback(_check4)
def test_reliability_tool(self):
    """Exercise the /reliability page (requires NumPy; skipped otherwise)."""
    # NOTE(review): the try/except wrapper around this import is missing
    # from this excerpt.
    from allmydata import reliability
    _hush_pyflakes = reliability
    raise unittest.SkipTest("reliability tool requires NumPy")
    d = self.GET("/reliability/")
    # -- _check body (its def header is missing from this excerpt) --
        self.failUnless('Reliability Tool' in res)
        # NOTE(review): some dict entries are missing from this excerpt.
        fields = {'drive_lifetime': "8Y",
                  "check_period": "1M",
                  "report_period": "3M",
        return self.POST("/reliability/", **fields)
    d.addCallback(_check)
    # -- _check2 body (its def header is missing from this excerpt) --
        self.failUnless('Reliability Tool' in res)
        r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
        self.failUnless(re.search(r, res), res)
    d.addCallback(_check2)
def test_status(self):
    """The /status pages should render for each kind of operation."""
    h = self.s.get_history()
    dl_num = h.list_all_download_statuses()[0].get_counter()
    ul_num = h.list_all_upload_statuses()[0].get_counter()
    mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
    pub_num = h.list_all_publish_statuses()[0].get_counter()
    ret_num = h.list_all_retrieve_statuses()[0].get_counter()
    d = self.GET("/status", followRedirect=True)
    # -- _check body (its def header is missing from this excerpt) --
        self.failUnless('Upload and Download Status' in res, res)
        self.failUnless('"down-%d"' % dl_num in res, res)
        self.failUnless('"up-%d"' % ul_num in res, res)
        self.failUnless('"mapupdate-%d"' % mu_num in res, res)
        self.failUnless('"publish-%d"' % pub_num in res, res)
        self.failUnless('"retrieve-%d"' % ret_num in res, res)
    d.addCallback(_check)
    d.addCallback(lambda res: self.GET("/status/?t=json"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, dict))
        #active = data["active"]
        # TODO: test more. We need a way to fake an active operation
    d.addCallback(_check_json)

    d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
    # -- _check_dl body (its def header is missing from this excerpt) --
        self.failUnless("File Download Status" in res, res)
    d.addCallback(_check_dl)
    d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
    # -- _check_ul body (its def header is missing from this excerpt) --
        self.failUnless("File Upload Status" in res, res)
    d.addCallback(_check_ul)
    d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
    def _check_mapupdate(res):
        self.failUnless("Mutable File Servermap Update Status" in res, res)
    d.addCallback(_check_mapupdate)
    d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
    def _check_publish(res):
        self.failUnless("Mutable File Publish Status" in res, res)
    d.addCallback(_check_publish)
    d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
    def _check_retrieve(res):
        self.failUnless("Mutable File Retrieve Status" in res, res)
    d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """Check the human-readable time/rate formatting used by the
    download and upload status renderers."""
    expected_times = [(None, ""), (2.5, "2.50s"), (0.25, "250ms"),
                      (0.0021, "2.1ms"), (0.000123, "123us")]
    expected_rates = [(None, ""), (2500000, "2.50MBps"),
                      (30100, "30.1kBps"), (123, "123Bps")]
    # both renderer mixins must agree on the formatting rules
    for renderer in [status.DownloadResultsRendererMixin(),
                     status.UploadResultsRendererMixin()]:
        for value, rendered in expected_times:
            self.failUnlessReallyEqual(renderer.render_time(None, value),
                                       rendered)
        for value, rendered in expected_rates:
            self.failUnlessReallyEqual(renderer.render_rate(None, value),
                                       rendered)
def test_GET_FILEURL(self):
    # plain GET of a file returns its full contents
    d = self.GET(self.public_url + "/foo/bar.txt")
    d.addCallback(self.failUnlessIsBarDotTxt)
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_GET_FILEURL_range(self):
    # a satisfiable Range request yields 206 Partial Content
    headers = {"range": "bytes=1-10"}
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 1-10/%d" % len(self.BAR_CONTENTS))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_GET_FILEURL_partial_range(self):
    # open-ended range "bytes=5-" returns the tail of the file
    headers = {"range": "bytes=5-"}
    length = len(self.BAR_CONTENTS)
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 5-%d/%d" % (length-1, length))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_GET_FILEURL_partial_end_range(self):
    # suffix range "bytes=-5" returns the last 5 bytes
    headers = {"range": "bytes=-5"}
    length = len(self.BAR_CONTENTS)
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes %d-%d/%d" % (length-5, length-1, length))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_GET_FILEURL_partial_range_overrun(self):
    # a range starting beyond EOF must be rejected with 416
    headers = {"range": "bytes=100-200"}
    d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
                         "416 Requested Range not satisfiable",
                         "First beyond end of file",
                         self.GET, self.public_url + "/foo/bar.txt",
    # NOTE(review): the closing argument(s) / return d are missing from
    # this excerpt.
def test_HEAD_FILEURL_range(self):
    # HEAD with a satisfiable Range gives 206, Content-Range, empty body
    headers = {"range": "bytes=1-10"}
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(res, "")
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 1-10/%d" % len(self.BAR_CONTENTS))
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_HEAD_FILEURL_partial_range(self):
    # open-ended range on HEAD
    headers = {"range": "bytes=5-"}
    length = len(self.BAR_CONTENTS)
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 5-%d/%d" % (length-1, length))
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_HEAD_FILEURL_partial_end_range(self):
    # suffix range on HEAD
    headers = {"range": "bytes=-5"}
    length = len(self.BAR_CONTENTS)
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes %d-%d/%d" % (length-5, length-1, length))
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_HEAD_FILEURL_partial_range_overrun(self):
    # out-of-range HEAD is rejected with 416
    headers = {"range": "bytes=100-200"}
    d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
                         "416 Requested Range not satisfiable",
                         # NOTE(review): an argument line is missing here.
                         self.HEAD, self.public_url + "/foo/bar.txt",
    # NOTE(review): the closing argument(s) / return d are missing from
    # this excerpt.

def test_GET_FILEURL_range_bad(self):
    # a malformed Range header is ignored: full body, 200, no Content-Range
    headers = {"range": "BOGUS=fizbop-quarnak"}
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 200)
        self.failUnless(not headers.has_key("content-range"))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.
def test_HEAD_FILEURL(self):
    # HEAD reports length and content-type but delivers no body
    d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(res, "")
        self.failUnlessReallyEqual(headers["content-length"][0],
                                   str(len(self.BAR_CONTENTS)))
        self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_GET_FILEURL_named(self):
    """/file and /named URLs accept an arbitrary trailing filename."""
    base = "/file/%s" % urllib.quote(self._bar_txt_uri)
    base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
    d = self.GET(base + "/@@name=/blah.txt")
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base + "/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    save_url = base + "?save=true&filename=blah.txt"
    d.addCallback(lambda res: self.GET(save_url))
    d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    u_fn_e = urllib.quote(u_filename.encode("utf-8"))
    u_url = base + "?save=true&filename=" + u_fn_e
    d.addCallback(lambda res: self.GET(u_url))
    d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    # NOTE(review): the trailing 'return d' is missing from this excerpt.
def test_PUT_FILEURL_named_bad(self):
    # /file URLs reject PUT
    base = "/file/%s" % urllib.quote(self._bar_txt_uri)
    d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                         # NOTE(review): the status-string argument line
                         # is missing from this excerpt.
                         "/file can only be used with GET or HEAD",
                         self.PUT, base + "/@@name=/blah.txt", "")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_GET_DIRURL_named_bad(self):
    # /file with a directory cap must be rejected
    base = "/file/%s" % urllib.quote(self._foo_uri)
    d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                         # NOTE(review): argument line(s) are missing here.
                         self.GET, base + "/@@name=/blah.txt")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_GET_slash_file_bad(self):
    # bare /file with no cap must be rejected
    d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                         # NOTE(review): the status-string argument line
                         # is missing from this excerpt.
                         "/file must be followed by a file-cap and a name",
    # NOTE(review): remaining argument line(s) / return d are missing.

def test_GET_unhandled_URI_named(self):
    # /file can't serve verify-caps
    contents, n, newuri = self.makefile(12)
    verifier_cap = n.get_verify_cap().to_string()
    base = "/file/%s" % urllib.quote(verifier_cap)
    # client.create_node_from_uri() can't handle verify-caps
    d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                         "400 Bad Request", "is not a file-cap",
    # NOTE(review): remaining argument line(s) / return d are missing.

def test_GET_unhandled_URI(self):
    # /uri with a verify-cap only supports t=info
    contents, n, newuri = self.makefile(12)
    verifier_cap = n.get_verify_cap().to_string()
    base = "/uri/%s" % urllib.quote(verifier_cap)
    # client.create_node_from_uri() can't handle verify-caps
    d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                         # NOTE(review): an argument line is missing here.
                         "GET unknown URI type: can only do t=info",
    # NOTE(review): remaining argument line(s) / return d are missing.
def test_GET_FILE_URI(self):
    # a plain /uri/<filecap> GET returns the file contents
    base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
    # NOTE(review): the GET(base) call line is missing from this excerpt.
    d.addCallback(self.failUnlessIsBarDotTxt)
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_GET_FILE_URI_badchild(self):
    # files have no children; asking for one is a 400
    base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
    errmsg = "Files have no children, certainly not named 'boguschild'"
    d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                         "400 Bad Request", errmsg,
    # NOTE(review): remaining argument line(s) / return d are missing.

def test_PUT_FILE_URI_badchild(self):
    # can't create a child under a file node
    base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
    errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
    d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                         "400 Bad Request", errmsg,
    # NOTE(review): remaining argument line(s) / return d are missing.

# TODO: version of this with a Unicode filename
def test_GET_FILEURL_save(self):
    # ?save=true should add a content-disposition attachment header
    d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
                 return_response=True)
    def _got((res, statuscode, headers)):
        content_disposition = headers["content-disposition"][0]
        self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
        self.failUnlessIsBarDotTxt(res)
    # NOTE(review): d.addCallback(_got) / return d are missing from this
    # excerpt.

def test_GET_FILEURL_missing(self):
    # GET of a nonexistent child is a 404
    d = self.GET(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_GET_FILEURL_missing")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.
def test_PUT_overwrite_only_files(self):
    # create a directory, put a file in that directory.
    contents, n, filecap = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/dir/file1.txt",
                           self.NEWFILE_CONTENTS))
    # try to overwrite the file with replace=only-files
    # NOTE(review): a line is missing from this excerpt here.
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
    # NOTE(review): continuation line(s) are missing from this excerpt.
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                   "There was already a child by that name, and you asked me "
                                   # NOTE(review): message continuation missing.
                                   self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
    # NOTE(review): remaining argument line(s) / return d are missing.

def test_PUT_NEWFILEURL(self):
    # PUT of a new immutable file under foo/
    d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 201)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_not_mutable(self):
    # explicit ?mutable=false behaves like the immutable default
    d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                 self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 201)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_range_bad(self):
    # Content-Range on PUT is not implemented and must be refused
    headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
    target = self.public_url + "/foo/new.txt"
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                         "501 Not Implemented",
                         "Content-Range in PUT not yet supported",
                         # (and certainly not for immutable files)
                         self.PUT, target, self.NEWFILE_CONTENTS[1:11],
    # NOTE(review): the closing argument line is missing from this excerpt.
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"new.txt"))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.
def test_PUT_NEWFILEURL_mutable(self):
    # ?mutable=true creates a mutable file and returns its write-cap
    d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                 self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 201)
    # -- _check_uri body (its def header is missing from this excerpt) --
        u = uri.from_string_mutable_filenode(res)
        self.failUnless(u.is_mutable())
        self.failIf(u.is_readonly())
    d.addCallback(_check_uri)
    d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(self._foo_node,
                                                         # NOTE(review): an
                                                         # argument line is
                                                         # missing here.
                                                         self.NEWFILE_CONTENTS))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_mutable_toobig(self):
    # SDMF is single-segment; oversize mutable uploads get a 413
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         # NOTE(review): the self.PUT argument line is
                         # missing from this excerpt.
                         self.public_url + "/foo/new.txt?mutable=true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_replace(self):
    # PUT to an existing name replaces it by default
    d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 200)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_bad_t(self):
    # unknown t= values on file PUT are rejected
    d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
                         "PUT to a file: bad t=bogus",
                         self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
    # NOTE(review): remaining argument line(s) / return d are missing.

def test_PUT_NEWFILEURL_no_replace(self):
    # ?replace=false on an existing child yields 409 Conflict
    d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
                 self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
              # NOTE(review): the status-string argument line is missing.
              "There was already a child by that name, and you asked me "
    # NOTE(review): remaining argument line(s) / return d are missing.
def test_PUT_NEWFILEURL_mkdirs(self):
    # intermediate directories are created automatically on PUT
    d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
    # NOTE(review): the binding of 'fn' (presumably self._foo_node) is
    # missing from this excerpt.
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
    d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
    d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
                                                  self.NEWFILE_CONTENTS))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_blocked(self):
    # an existing file blocks creation of a same-named directory
    d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
                 self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
              # NOTE(review): the status-string argument line is missing.
              "Unable to create directory 'blockingfile': a file was in the way")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_PUT_NEWFILEURL_emptyname(self):
    # an empty pathname component (i.e. a double-slash) is disallowed
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
                         # NOTE(review): the status-string argument line
                         # is missing from this excerpt.
                         "The webapi does not allow empty pathname components",
                         self.PUT, self.public_url + "/foo//new.txt", "")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_DELETE_FILEURL(self):
    # DELETE removes the child link
    d = self.DELETE(self.public_url + "/foo/bar.txt")
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_DELETE_FILEURL_missing(self):
    d = self.DELETE(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.

def test_DELETE_FILEURL_missing2(self):
    d = self.DELETE(self.public_url + "/missing/missing")
    d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
    # NOTE(review): the trailing 'return d' is missing from this excerpt.
def failUnlessHasBarDotTxtMetadata(self, res):
    """Check bar.txt's t=json body for the expected tahoe link metadata.

    *res* is the raw JSON response text. It must decode to a list whose
    second element carries metadata["tahoe"] with both linkcrtime and
    linkmotime keys, and linkcrtime must equal the value captured in
    self._bar_txt_metadata when the fixture was built.
    """
    decoded = simplejson.loads(res)
    self.failUnless(isinstance(decoded, list))
    info = decoded[1]
    self.failUnlessIn("metadata", info)
    self.failUnlessIn("tahoe", info["metadata"])
    tahoe_md = info["metadata"]["tahoe"]
    self.failUnlessIn("linkcrtime", tahoe_md)
    self.failUnlessIn("linkmotime", tahoe_md)
    self.failUnlessReallyEqual(tahoe_md["linkcrtime"],
                               self._bar_txt_metadata["tahoe"]["linkcrtime"])
# --- GET-file representation tests (t=json, t=uri, CSS, banner) ---------
# NOTE(review): numbering gaps mean the "def _check(...):" wrapper lines
# and "return d" lines of these tests are missing from this listing.

# t=json on a file returns the JSON description plus tahoe metadata.
948 def test_GET_FILEURL_json(self):
949 # twisted.web.http.parse_qs ignores any query args without an '=', so
950 # I can't do "GET /path?json", I have to do "GET /path/t=json"
951 # instead. This may make it tricky to emulate the S3 interface
953 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
955 self.failUnlessIsBarJSON(data)
956 self.failUnlessHasBarDotTxtMetadata(data)
958 d.addCallback(_check1)

# t=json on a missing child returns 404.
961 def test_GET_FILEURL_json_missing(self):
962 d = self.GET(self.public_url + "/foo/missing?json")
963 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")

# t=uri returns the file's cap; t=readonly-uri is currently identical
# for immutable files.
966 def test_GET_FILEURL_uri(self):
967 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
969 self.failUnlessReallyEqual(res, self._bar_txt_uri)
970 d.addCallback(_check)
971 d.addCallback(lambda res:
972 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
974 # for now, for files, uris and readonly-uris are the same
975 self.failUnlessReallyEqual(res, self._bar_txt_uri)
976 d.addCallback(_check2)

# An unknown t= value on a file GET yields 400 Bad Request.
979 def test_GET_FILEURL_badtype(self):
980 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
983 self.public_url + "/foo/bar.txt?t=bogus")

# The served stylesheet must contain the expected toolbar rules.
986 def test_CSS_FILE(self):
987 d = self.GET("/tahoe_css", followRedirect=True)
989 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
990 self.failUnless(CSS_STYLE.search(res), res)
991 d.addCallback(_check)

# t=uri on a missing child returns 404.
994 def test_GET_FILEURL_uri_missing(self):
995 d = self.GET(self.public_url + "/foo/missing?t=uri")
996 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")

# The directory page banner links back to the Welcome page via a
# relative ../../.. URL.
999 def test_GET_DIRECTORY_html_banner(self):
1000 d = self.GET(self.public_url + "/foo", followRedirect=True)
1002 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>',res)
1003 d.addCallback(_check)
# Exercise the HTML directory listing: relative links, FILE/DIR rows,
# the per-child delete form, read-only rendering, an empty directory's
# mkdir form, and a literal (LIT) directory.
# NOTE(review): this listing is sampled — several statements (e.g. the
# "def _check(res):" wrappers, ROOT binding, parts of get_bar) are on
# lines missing from this view.
1006 def test_GET_DIRURL(self):
1007 # the addSlash means we get a redirect here
1008 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
1010 d = self.GET(self.public_url + "/foo", followRedirect=True)
# The welcome-page link must be relative (built from ROOT).
1012 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
1014 # the FILE reference points to a URI, but it should end in bar.txt
1015 bar_url = ("%s/file/%s/@@named=/bar.txt" %
1016 (ROOT, urllib.quote(self._bar_txt_uri)))
1017 get_bar = "".join([r'<td>FILE</td>',
1019 r'<a href="%s">bar.txt</a>' % bar_url,
1021 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
1023 self.failUnless(re.search(get_bar, res), res)
1024 for line in res.split("\n"):
1025 # find the line that contains the delete button for bar.txt
1026 if ("form action" in line and
1027 'value="delete"' in line and
1028 'value="bar.txt"' in line):
1029 # the form target should use a relative URL
1030 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1031 self.failUnless(('action="%s"' % foo_url) in line, line)
1032 # and the when_done= should too
1033 #done_url = urllib.quote(???)
1034 #self.failUnless(('name="when_done" value="%s"' % done_url)
# Reached only when the loop above never found/validated the form row.
1038 self.fail("unable to find delete-bar.txt line", res)
1040 # the DIR reference just points to a URI
1041 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1042 get_sub = ((r'<td>DIR</td>')
1043 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
1044 self.failUnless(re.search(get_sub, res), res)
1045 d.addCallback(_check)
1047 # look at a readonly directory
1048 d.addCallback(lambda res:
1049 self.GET(self.public_url + "/reedownlee", followRedirect=True))
# Read-only listing must say so and must not offer an upload form.
1051 self.failUnless("(read-only)" in res, res)
1052 self.failIf("Upload a file" in res, res)
1053 d.addCallback(_check2)
1055 # and at a directory that contains a readonly directory
1056 d.addCallback(lambda res:
1057 self.GET(self.public_url, followRedirect=True))
1059 self.failUnless(re.search('<td>DIR-RO</td>'
1060 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1061 d.addCallback(_check3)
1063 # and an empty directory
1064 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1066 self.failUnless("directory is empty" in res, res)
1067 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1068 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1069 d.addCallback(_check4)
1071 # and at a literal directory
1072 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1073 d.addCallback(lambda res:
1074 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1076 self.failUnless('(immutable)' in res, res)
1077 self.failUnless(re.search('<td>FILE</td>'
1078 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1079 d.addCallback(_check5)
# Unknown t= value on a directory GET yields an HTTP error
# (status args are on lines missing from this listing).
1082 def test_GET_DIRURL_badtype(self):
1083 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1087 self.public_url + "/foo?t=bogus")

# t=json on a directory returns the canonical foo JSON description.
1090 def test_GET_DIRURL_json(self):
1091 d = self.GET(self.public_url + "/foo?t=json")
1092 d.addCallback(self.failUnlessIsFooJSON)

# start-manifest without ophandle= must be refused: it is a slow
# (long-running) operation and needs a handle to poll.
1096 def test_POST_DIRURL_manifest_no_ophandle(self):
1097 d = self.shouldFail2(error.Error,
1098 "test_POST_DIRURL_manifest_no_ophandle",
1100 "slow operation requires ophandle=",
1101 self.POST, self.public_url, t="start-manifest")

# Run t=start-manifest under ophandle=125 and fetch its results in
# several output formats (default/html/text/JSON), checking each.
# NOTE(review): "return d" and the "got = {}" / "def _got_json(res):"
# lines fall in numbering gaps and are missing from this view.
1104 def test_POST_DIRURL_manifest(self):
1105 d = defer.succeed(None)
# Helper: kick off the manifest operation, wait for completion, and
# fetch the results in the requested output format.
1106 def getman(ignored, output):
1107 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1108 followRedirect=True)
1109 d.addCallback(self.wait_for_operation, "125")
1110 d.addCallback(self.get_operation_results, "125", output)
1112 d.addCallback(getman, None)
# The HTML rendering must mention the storage index and the sub dir.
1113 def _got_html(manifest):
1114 self.failUnless("Manifest of SI=" in manifest)
1115 self.failUnless("<td>sub</td>" in manifest)
1116 self.failUnless(self._sub_uri in manifest)
1117 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1118 d.addCallback(_got_html)
1120 # both t=status and unadorned GET should be identical
1121 d.addCallback(lambda res: self.GET("/operations/125"))
1122 d.addCallback(_got_html)
1124 d.addCallback(getman, "html")
1125 d.addCallback(_got_html)
1126 d.addCallback(getman, "text")
# Text rendering: one "path cap" line per object.
1127 def _got_text(manifest):
1128 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1129 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1130 d.addCallback(_got_text)
1131 d.addCallback(getman, "JSON")
# JSON rendering: res["manifest"] is a list of (path_list, cap) pairs,
# plus bookkeeping keys (finished/origin/storage-index/verifycaps/stats).
1133 data = res["manifest"]
1135 for (path_list, cap) in data:
1136 got[tuple(path_list)] = cap
1137 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1138 self.failUnless((u"sub",u"baz.txt") in got)
1139 self.failUnless("finished" in res)
1140 self.failUnless("origin" in res)
1141 self.failUnless("storage-index" in res)
1142 self.failUnless("verifycaps" in res)
1143 self.failUnless("stats" in res)
1144 d.addCallback(_got_json)
# start-deep-size without ophandle= must be refused (slow operation).
1147 def test_POST_DIRURL_deepsize_no_ophandle(self):
1148 d = self.shouldFail2(error.Error,
1149 "test_POST_DIRURL_deepsize_no_ophandle",
1151 "slow operation requires ophandle=",
1152 self.POST, self.public_url, t="start-deep-size")

# t=start-deep-size under ophandle=126: check both the JSON and the
# text renderings of the computed total size.
# NOTE(review): the "size = data["size"]" line and the "def _got_text"
# line are in numbering gaps, missing from this view.
1155 def test_POST_DIRURL_deepsize(self):
1156 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1157 followRedirect=True)
1158 d.addCallback(self.wait_for_operation, "126")
1159 d.addCallback(self.get_operation_results, "126", "json")
1160 def _got_json(data):
1161 self.failUnlessReallyEqual(data["finished"], True)
# Exact size varies with directory encoding, so only a lower bound.
1163 self.failUnless(size > 1000)
1164 d.addCallback(_got_json)
1165 d.addCallback(self.get_operation_results, "126", "text")
# Text form contains a "size: NNN" line; parse and bound-check it.
1167 mo = re.search(r'^size: (\d+)$', res, re.M)
1168 self.failUnless(mo, res)
1169 size = int(mo.group(1))
1170 # with directories, the size varies.
1171 self.failUnless(size > 1000)
1172 d.addCallback(_got_text)

# start-deep-stats without ophandle= must be refused (slow operation).
1175 def test_POST_DIRURL_deepstats_no_ophandle(self):
1176 d = self.shouldFail2(error.Error,
1177 "test_POST_DIRURL_deepstats_no_ophandle",
1179 "slow operation requires ophandle=",
1180 self.POST, self.public_url, t="start-deep-stats")

# t=start-deep-stats under ophandle=127: the JSON stats must match the
# known fixture contents (3 immutable files, 3 directories, ...).
1183 def test_POST_DIRURL_deepstats(self):
1184 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1185 followRedirect=True)
1186 d.addCallback(self.wait_for_operation, "127")
1187 d.addCallback(self.get_operation_results, "127", "json")
1188 def _got_json(stats):
1189 expected = {"count-immutable-files": 3,
1190 "count-mutable-files": 0,
1191 "count-literal-files": 0,
1193 "count-directories": 3,
1194 "size-immutable-files": 57,
1195 "size-literal-files": 0,
1196 #"size-directories": 1912, # varies
1197 #"largest-directory": 1590,
1198 "largest-directory-children": 5,
1199 "largest-immutable-file": 19,
# Size-varying keys are deliberately left out of 'expected' above.
1201 for k,v in expected.iteritems():
1202 self.failUnlessReallyEqual(stats[k], v,
1203 "stats[%s] was %s, not %s" %
1205 self.failUnlessReallyEqual(stats["size-files-histogram"],
1207 d.addCallback(_got_json)
# t=stream-manifest returns newline-separated JSON units: one per
# object visited (directory/file) plus a trailing "stats" unit.
# NOTE(review): the "first = units[0]" line falls in a numbering gap.
1210 def test_POST_DIRURL_stream_manifest(self):
1211 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1213 self.failUnless(res.endswith("\n"))
1214 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
# 6 objects under foo/ plus the final stats unit.
1215 self.failUnlessReallyEqual(len(units), 7)
1216 self.failUnlessEqual(units[-1]["type"], "stats")
# The first unit is the root (foo) directory itself, empty path.
1218 self.failUnlessEqual(first["path"], [])
1219 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1220 self.failUnlessEqual(first["type"], "directory")
# Locate sub/baz.txt by its cap and check its derived caps are present.
1221 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1222 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1223 self.failIfEqual(baz["storage-index"], None)
1224 self.failIfEqual(baz["verifycap"], None)
1225 self.failIfEqual(baz["repaircap"], None)
1227 d.addCallback(_check)

# t=uri on a directory returns its read-write cap.
1230 def test_GET_DIRURL_uri(self):
1231 d = self.GET(self.public_url + "/foo?t=uri")
1233 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1234 d.addCallback(_check)

# t=readonly-uri on a directory returns the diminished read-only cap.
1237 def test_GET_DIRURL_readonly_uri(self):
1238 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1240 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1241 d.addCallback(_check)
# PUT t=mkdir creates a new empty directory under foo.
1244 def test_PUT_NEWDIRURL(self):
1245 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1246 d.addCallback(lambda res:
1247 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1248 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1249 d.addCallback(self.failUnlessNodeKeysAre, [])

# Same as above but via POST t=mkdir (POST2 returns the full response).
1252 def test_POST_NEWDIRURL(self):
1253 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1254 d.addCallback(lambda res:
1255 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1256 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1257 d.addCallback(self.failUnlessNodeKeysAre, [])

# mkdir with an empty pathname component (double slash) is rejected.
1260 def test_POST_NEWDIRURL_emptyname(self):
1261 # an empty pathname component (i.e. a double-slash) is disallowed
1262 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1264 "The webapi does not allow empty pathname components, i.e. a double slash",
1265 self.POST, self.public_url + "//?t=mkdir")
# t=mkdir-with-children: POST a JSON map of initial children and verify
# every kind of child (immutable, mutable, unknown rw/ro/imm caps, sub-
# directories) landed in the new directory with the right cap flavor.
# NOTE(review): several caps[...] argument continuation lines (e.g. for
# 'filecap1', 'filecap2'/'filecap3', 'dircap') are in numbering gaps.
1268 def test_POST_NEWDIRURL_initial_children(self):
1269 (newkids, caps) = self._create_initial_children()
1270 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1271 simplejson.dumps(newkids))
# _check receives the new directory's cap in the response body.
1273 n = self.s.create_node_from_uri(uri.strip())
1274 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1275 d2.addCallback(lambda ign:
1276 self.failUnlessROChildURIIs(n, u"child-imm",
1278 d2.addCallback(lambda ign:
1279 self.failUnlessRWChildURIIs(n, u"child-mutable",
1281 d2.addCallback(lambda ign:
1282 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1284 d2.addCallback(lambda ign:
1285 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1286 caps['unknown_rocap']))
1287 d2.addCallback(lambda ign:
1288 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1289 caps['unknown_rwcap']))
1290 d2.addCallback(lambda ign:
1291 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1292 caps['unknown_immcap']))
1293 d2.addCallback(lambda ign:
1294 self.failUnlessRWChildURIIs(n, u"dirchild",
1296 d2.addCallback(lambda ign:
1297 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1299 d2.addCallback(lambda ign:
1300 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1301 caps['emptydircap']))
1303 d.addCallback(_check)
# The new directory must also be linked under foo, with the same kids.
1304 d.addCallback(lambda res:
1305 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1306 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1307 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1308 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1309 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# t=mkdir-immutable: create a deep-immutable directory from a JSON map
# of purely-immutable children and verify each child's read-only cap,
# both via the returned cap and via the link under foo/newdir.
1312 def test_POST_NEWDIRURL_immutable(self):
1313 (newkids, caps) = self._create_immutable_children()
1314 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1315 simplejson.dumps(newkids))
# _check receives the new immutable directory's cap in the body.
1317 n = self.s.create_node_from_uri(uri.strip())
1318 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1319 d2.addCallback(lambda ign:
1320 self.failUnlessROChildURIIs(n, u"child-imm",
1322 d2.addCallback(lambda ign:
1323 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1324 caps['unknown_immcap']))
1325 d2.addCallback(lambda ign:
1326 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1328 d2.addCallback(lambda ign:
1329 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1331 d2.addCallback(lambda ign:
1332 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1333 caps['emptydircap']))
1335 d.addCallback(_check)
1336 d.addCallback(lambda res:
1337 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1338 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1339 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1340 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1341 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1342 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1343 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1344 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1345 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1346 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1347 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1348 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1349 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1350 d.addErrback(self.explain_web_error)

# mkdir-immutable with children that include mutable caps must fail.
1353 def test_POST_NEWDIRURL_immutable_bad(self):
1354 (newkids, caps) = self._create_initial_children()
1355 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1357 "needed to be immutable but was not",
1359 self.public_url + "/foo/newdir?t=mkdir-immutable",
1360 simplejson.dumps(newkids))
# PUT t=mkdir on an already-existing directory is a no-op: 'sub' keeps
# its existing child baz.txt rather than being replaced by an empty dir.
1363 def test_PUT_NEWDIRURL_exists(self):
1364 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1365 d.addCallback(lambda res:
1366 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1367 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1368 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])

# mkdir beneath an existing *file* (bar.txt) must 409, and must not
# damage the existing sibling directory 'sub'.
1371 def test_PUT_NEWDIRURL_blocked(self):
1372 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1373 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1375 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1376 d.addCallback(lambda res:
1377 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1378 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1379 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])

# t=mkdir-p&path=/sub1/sub2 creates intermediate directories and is
# idempotent: repeating it must return the same directory cap.
# NOTE(review): the PUT/POST issuing 'url' and the second mkdir-p call
# are on lines missing from this listing.
1382 def test_PUT_NEWDIRURL_mkdir_p(self):
1383 d = defer.succeed(None)
1384 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1385 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1386 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1387 def mkdir_p(mkpnode):
1388 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
# made_subsub: the response body is the new sub2 cap; the node found
# at mkp/sub1/sub2 must carry the same URI, and a repeat of the
# mkdir-p request must return that identical cap.
1390 def made_subsub(ssuri):
1391 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1392 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
1394 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1396 d.addCallback(made_subsub)
1398 d.addCallback(mkdir_p)
# PUT t=mkdir with a missing intermediate ('subdir') creates the whole
# chain: newdir ends up under subdir, not directly under foo.
1401 def test_PUT_NEWDIRURL_mkdirs(self):
1402 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1403 d.addCallback(lambda res:
1404 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1405 d.addCallback(lambda res:
1406 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1407 d.addCallback(lambda res:
1408 self._foo_node.get_child_at_path(u"subdir/newdir"))
1409 d.addCallback(self.failUnlessNodeKeysAre, [])

# DELETE of a directory unlinks it from its parent.
1412 def test_DELETE_DIRURL(self):
1413 d = self.DELETE(self.public_url + "/foo")
1414 d.addCallback(lambda res:
1415 self.failIfNodeHasChild(self.public_root, u"foo"))

# DELETE of a missing child 404s and leaves the parent intact.
1418 def test_DELETE_DIRURL_missing(self):
1419 d = self.DELETE(self.public_url + "/foo/missing")
1420 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1421 d.addCallback(lambda res:
1422 self.failUnlessNodeHasChild(self.public_root, u"foo"))

# DELETE of a missing top-level directory also 404s.
1425 def test_DELETE_DIRURL_missing2(self):
1426 d = self.DELETE(self.public_url + "/missing")
1427 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")

# Debug helper: walk the whole public root and (on missing lines) print
# each child path — not used by the assertions above.
1430 def dump_root(self):
1432 w = webish.DirnodeWalkerMixin()
1433 def visitor(childpath, childnode, metadata):
1435 d = w.walk(self.public_root, visitor)
# --- Deferred-returning directory-node assertion helpers ----------------
# NOTE(review): each helper's "d = node.list()" and "return d" lines are
# in numbering gaps of this listing; the visible callbacks imply them.

# Assert that node's children are exactly expected_keys (unordered).
1438 def failUnlessNodeKeysAre(self, node, expected_keys):
1439 for k in expected_keys:
1440 assert isinstance(k, unicode)
1442 def _check(children):
1443 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1444 d.addCallback(_check)

# Assert that node has a child with the given (unicode) name.
1446 def failUnlessNodeHasChild(self, node, name):
1447 assert isinstance(name, unicode)
1449 def _check(children):
1450 self.failUnless(name in children)
1451 d.addCallback(_check)

# Assert that node does NOT have a child with the given name.
1453 def failIfNodeHasChild(self, node, name):
1454 assert isinstance(name, unicode)
1456 def _check(children):
1457 self.failIf(name in children)
1458 d.addCallback(_check)

# Assert that the (immutable) child at 'name' downloads to exactly
# expected_contents.
1461 def failUnlessChildContentsAre(self, node, name, expected_contents):
1462 assert isinstance(name, unicode)
1463 d = node.get_child_at_path(name)
1464 d.addCallback(lambda node: download_to_data(node))
1465 def _check(contents):
1466 self.failUnlessReallyEqual(contents, expected_contents)
1467 d.addCallback(_check)

# Same, but for a mutable child: fetch via download_best_version().
1470 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1471 assert isinstance(name, unicode)
1472 d = node.get_child_at_path(name)
1473 d.addCallback(lambda node: node.download_best_version())
1474 def _check(contents):
1475 self.failUnlessReallyEqual(contents, expected_contents)
1476 d.addCallback(_check)
# --- Cap-matching assertion helpers -------------------------------------
# NOTE(review): the "def _check(child):" wrapper lines and "return d"
# lines fall in numbering gaps of this listing.

# Assert the child at 'name' is writeable (or unknown) and its uri /
# write_uri / readonly_uri all agree with expected_uri.
1479 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1480 assert isinstance(name, unicode)
1481 d = node.get_child_at_path(name)
1483 self.failUnless(child.is_unknown() or not child.is_readonly())
1484 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1485 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1486 expected_ro_uri = self._make_readonly(expected_uri)
1488 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1489 d.addCallback(_check)

# Assert the child at 'name' is read-only (or unknown): no write cap,
# and both uri and readonly_uri equal expected_uri.
1492 def failUnlessROChildURIIs(self, node, name, expected_uri):
1493 assert isinstance(name, unicode)
1494 d = node.get_child_at_path(name)
1496 self.failUnless(child.is_unknown() or child.is_readonly())
1497 self.failUnlessReallyEqual(child.get_write_uri(), None)
1498 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1499 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1500 d.addCallback(_check)

# Inverse direction: given a cap returned by the webapi (got_uri),
# assert the writeable child at 'name' carries exactly that cap.
1503 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1504 assert isinstance(name, unicode)
1505 d = node.get_child_at_path(name)
1507 self.failUnless(child.is_unknown() or not child.is_readonly())
1508 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1509 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1510 expected_ro_uri = self._make_readonly(got_uri)
1512 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1513 d.addCallback(_check)

# As above, for a read-only child: no write cap, got_uri matches both
# the uri and the readonly_uri.
1516 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1517 assert isinstance(name, unicode)
1518 d = node.get_child_at_path(name)
1520 self.failUnless(child.is_unknown() or child.is_readonly())
1521 self.failUnlessReallyEqual(child.get_write_uri(), None)
1522 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1523 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1524 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert the fake grid stores exactly *contents* under *got_uri*.

    Looks the CHK cap up in FakeCHKFileNode.all_contents, the in-memory
    store backing these tests; an unknown cap raises KeyError.
    """
    # Use an equality assertion instead of the original bare
    # failUnless(a == b): on failure it reports both values, and it
    # matches the failUnlessReallyEqual convention used throughout
    # this test class.
    self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[got_uri],
                               contents)
# --- POST t=upload tests -------------------------------------------------
# NOTE(review): numbering gaps mean 'fn' bindings, "return d" lines, and
# some keyword-argument lines are missing from this listing.

# Basic upload into foo: the response cap must match the new read-only
# child and the contents must round-trip.
1530 def test_POST_upload(self):
1531 d = self.POST(self.public_url + "/foo", t="upload",
1532 file=("new.txt", self.NEWFILE_CONTENTS))
1534 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1535 d.addCallback(lambda res:
1536 self.failUnlessChildContentsAre(fn, u"new.txt",
1537 self.NEWFILE_CONTENTS))

# Upload with a non-ASCII filename: the child is linked under the
# unicode name and is fetchable via the UTF-8-encoded URL.
1540 def test_POST_upload_unicode(self):
1541 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1542 d = self.POST(self.public_url + "/foo", t="upload",
1543 file=(filename, self.NEWFILE_CONTENTS))
1545 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1546 d.addCallback(lambda res:
1547 self.failUnlessChildContentsAre(fn, filename,
1548 self.NEWFILE_CONTENTS))
1549 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1550 d.addCallback(lambda res: self.GET(target_url))
1551 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1552 self.NEWFILE_CONTENTS,

# As above, but the name= field (on a missing line) overrides the
# filename supplied in the file part ("overridden").
1556 def test_POST_upload_unicode_named(self):
1557 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1558 d = self.POST(self.public_url + "/foo", t="upload",
1560 file=("overridden", self.NEWFILE_CONTENTS))
1562 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1563 d.addCallback(lambda res:
1564 self.failUnlessChildContentsAre(fn, filename,
1565 self.NEWFILE_CONTENTS))
1566 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1567 d.addCallback(lambda res: self.GET(target_url))
1568 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1569 self.NEWFILE_CONTENTS,

# Upload to /uri (no directory link): the results page must expose the
# new cap, which must map to the uploaded contents.
1573 def test_POST_upload_no_link(self):
1574 d = self.POST("/uri", t="upload",
1575 file=("new.txt", self.NEWFILE_CONTENTS))
1576 def _check_upload_results(page):
1577 # this should be a page which describes the results of the upload
1578 # that just finished.
1579 self.failUnless("Upload Results:" in page)
1580 self.failUnless("URI:" in page)
1581 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1582 mo = uri_re.search(page)
1583 self.failUnless(mo, page)
1584 new_uri = mo.group(1)
1586 d.addCallback(_check_upload_results)
1587 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)

# when_done= makes the unlinked upload redirect afterwards.
1590 def test_POST_upload_no_link_whendone(self):
1591 d = self.POST("/uri", t="upload", when_done="/",
1592 file=("new.txt", self.NEWFILE_CONTENTS))
1593 d.addBoth(self.shouldRedirect, "/")
# Helper: run 'callable' and expect it to raise error.PageRedirect; pass
# the redirect's status code and Location target to 'checker' and return
# its result. A non-Failure result means no redirect happened -> test
# failure. NOTE(review): the "def _done(res):" wrapper line and the
# trailing addBoth/return lines fall in numbering gaps of this listing.
1596 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1597 d = defer.maybeDeferred(callable, *args, **kwargs)
1599 if isinstance(res, failure.Failure):
1600 res.trap(error.PageRedirect)
1601 statuscode = res.value.status
1602 target = res.value.location
1603 return checker(statuscode, target)
1604 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done="/uri/%(uri)s" substitutes the new cap into the redirect
# target; following it must serve the uploaded contents.
1609 def test_POST_upload_no_link_whendone_results(self):
1610 def check(statuscode, target):
1611 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1612 self.failUnless(target.startswith(self.webish_url), target)
1613 return client.getPage(target, method="GET")
1614 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1616 self.POST, "/uri", t="upload",
1617 when_done="/uri/%(uri)s",
1618 file=("new.txt", self.NEWFILE_CONTENTS))
1619 d.addCallback(lambda res:
1620 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))

# Unlinked upload with mutable=true returns an SSK write cap; the file
# must be retrievable by node download, /uri/<cap>, and /file/<cap>.
# NOTE(review): the "def _check2/_check3/_check4(data):" lines are in
# numbering gaps of this listing.
1623 def test_POST_upload_no_link_mutable(self):
1624 d = self.POST("/uri", t="upload", mutable="true",
1625 file=("new.txt", self.NEWFILE_CONTENTS))
1626 def _check(filecap):
1627 filecap = filecap.strip()
1628 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
# Stash the cap for the later GET-based callbacks.
1629 self.filecap = filecap
1630 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1631 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1632 n = self.s.create_node_from_uri(filecap)
1633 return n.download_best_version()
1634 d.addCallback(_check)
1636 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1637 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1638 d.addCallback(_check2)
1640 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1641 return self.GET("/file/%s" % urllib.quote(self.filecap))
1642 d.addCallback(_check3)
1644 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1645 d.addCallback(_check4)

# An unlinked mutable upload one byte over MUTABLE_SIZELIMIT must be
# refused with 413 (SDMF single-segment limit).
1648 def test_POST_upload_no_link_mutable_toobig(self):
1649 d = self.shouldFail2(error.Error,
1650 "test_POST_upload_no_link_mutable_toobig",
1651 "413 Request Entity Too Large",
1652 "SDMF is limited to one segment, and 10001 > 10000",
1654 "/uri", t="upload", mutable="true",
1656 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# End-to-end exercise of a mutable file created via POST t=upload
# mutable=true: create, overwrite via POST and PUT (cap must stay
# stable), check the HTML/JSON directory renderings, t=uri /
# t=readonly-uri, direct /uri/<cap> access, HEAD size headers, and the
# SDMF size limit on overwrite.
# NOTE(review): sampled listing — 'fn' binding, several "def _got*/
# _check*(...)" wrapper lines, and some continuation lines are missing.
1659 def test_POST_upload_mutable(self):
1660 # this creates a mutable file
1661 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1662 file=("new.txt", self.NEWFILE_CONTENTS))
1664 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1665 d.addCallback(lambda res:
1666 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1667 self.NEWFILE_CONTENTS))
1668 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
# Remember the node and cap so later steps can verify cap stability.
1670 self.failUnless(IMutableFileNode.providedBy(newnode))
1671 self.failUnless(newnode.is_mutable())
1672 self.failIf(newnode.is_readonly())
1673 self._mutable_node = newnode
1674 self._mutable_uri = newnode.get_uri()
1677 # now upload it again and make sure that the URI doesn't change
1678 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1679 d.addCallback(lambda res:
1680 self.POST(self.public_url + "/foo", t="upload",
1682 file=("new.txt", NEWER_CONTENTS)))
1683 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1684 d.addCallback(lambda res:
1685 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1687 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
# _got2: the re-uploaded node keeps the same write cap.
1689 self.failUnless(IMutableFileNode.providedBy(newnode))
1690 self.failUnless(newnode.is_mutable())
1691 self.failIf(newnode.is_readonly())
1692 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1693 d.addCallback(_got2)
1695 # upload a second time, using PUT instead of POST
1696 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1697 d.addCallback(lambda res:
1698 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1699 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1700 d.addCallback(lambda res:
1701 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1704 # finally list the directory, since mutable files are displayed
1705 # slightly differently
1707 d.addCallback(lambda res:
1708 self.GET(self.public_url + "/foo/",
1709 followRedirect=True))
1710 def _check_page(res):
1711 # TODO: assert more about the contents
1712 self.failUnless("SSK" in res)
1714 d.addCallback(_check_page)
1716 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
# _got3: after the PUT overwrite, the cap is still unchanged.
1718 self.failUnless(IMutableFileNode.providedBy(newnode))
1719 self.failUnless(newnode.is_mutable())
1720 self.failIf(newnode.is_readonly())
1721 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1722 d.addCallback(_got3)
1724 # look at the JSON form of the enclosing directory
1725 d.addCallback(lambda res:
1726 self.GET(self.public_url + "/foo/?t=json",
1727 followRedirect=True))
1728 def _check_page_json(res):
1729 parsed = simplejson.loads(res)
1730 self.failUnlessEqual(parsed[0], "dirnode")
1731 children = dict( [(unicode(name),value)
1733 in parsed[1]["children"].iteritems()] )
1734 self.failUnless(u"new.txt" in children)
1735 new_json = children[u"new.txt"]
1736 self.failUnlessEqual(new_json[0], "filenode")
1737 self.failUnless(new_json[1]["mutable"])
1738 self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
1739 ro_uri = self._mutable_node.get_readonly().to_string()
1740 self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
1741 d.addCallback(_check_page_json)
1743 # and the JSON form of the file
1744 d.addCallback(lambda res:
1745 self.GET(self.public_url + "/foo/new.txt?t=json"))
1746 def _check_file_json(res):
1747 parsed = simplejson.loads(res)
1748 self.failUnlessEqual(parsed[0], "filenode")
1749 self.failUnless(parsed[1]["mutable"])
1750 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
1751 ro_uri = self._mutable_node.get_readonly().to_string()
1752 self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
1753 d.addCallback(_check_file_json)
1755 # and look at t=uri and t=readonly-uri
1756 d.addCallback(lambda res:
1757 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1758 d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
1759 d.addCallback(lambda res:
1760 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1761 def _check_ro_uri(res):
1762 ro_uri = self._mutable_node.get_readonly().to_string()
1763 self.failUnlessReallyEqual(res, ro_uri)
1764 d.addCallback(_check_ro_uri)
1766 # make sure we can get to it from /uri/URI
1767 d.addCallback(lambda res:
1768 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1769 d.addCallback(lambda res:
1770 self.failUnlessReallyEqual(res, NEW2_CONTENTS))
1772 # and that HEAD computes the size correctly
1773 d.addCallback(lambda res:
1774 self.HEAD(self.public_url + "/foo/new.txt",
1775 return_response=True))
# HEAD must return an empty body with accurate length/type headers.
1776 def _got_headers((res, status, headers)):
1777 self.failUnlessReallyEqual(res, "")
1778 self.failUnlessReallyEqual(headers["content-length"][0],
1779 str(len(NEW2_CONTENTS)))
1780 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
1781 d.addCallback(_got_headers)
1783 # make sure that size errors are displayed correctly for overwrite
1784 d.addCallback(lambda res:
1785 self.shouldFail2(error.Error,
1786 "test_POST_upload_mutable-toobig",
1787 "413 Request Entity Too Large",
1788 "SDMF is limited to one segment, and 10001 > 10000",
1790 self.public_url + "/foo", t="upload",
1793 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1796 d.addErrback(self.dump_error)
# A linked mutable upload over MUTABLE_SIZELIMIT must be refused with
# 413 (SDMF single-segment limit); the file= argument line is in a
# numbering gap of this listing.
1799 def test_POST_upload_mutable_toobig(self):
1800 d = self.shouldFail2(error.Error,
1801 "test_POST_upload_mutable_toobig",
1802 "413 Request Entity Too Large",
1803 "SDMF is limited to one segment, and 10001 > 10000",
1805 self.public_url + "/foo",
1806 t="upload", mutable="true",
1808 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )

# Debug errback: print the HTTP response body hidden inside a
# web.error.Error so trial's output is useful. NOTE(review): the
# trailing "return f" (needed to keep the failure propagating) falls
# in a numbering gap of this listing.
1811 def dump_error(self, f):
1812 # if the web server returns an error code (like 400 Bad Request),
1813 # web.client.getPage puts the HTTP response body into the .response
1814 # attribute of the exception object that it gives back. It does not
1815 # appear in the Failure's repr(), so the ERROR that trial displays
1816 # will be rather terse and unhelpful. addErrback this method to the
1817 # end of your chain to get more information out of these errors.
1818 if f.check(error.Error):
1819 print "web.error.Error:"
1821 print f.value.response
1824 def test_POST_upload_replace(self):
1825 d = self.POST(self.public_url + "/foo", t="upload",
1826 file=("bar.txt", self.NEWFILE_CONTENTS))
1828 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1829 d.addCallback(lambda res:
1830 self.failUnlessChildContentsAre(fn, u"bar.txt",
1831 self.NEWFILE_CONTENTS))
def test_POST_upload_no_replace_ok(self):
    """replace=false is allowed when no child with that name exists."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessReallyEqual(res,
                                                         self.NEWFILE_CONTENTS))
    # return the Deferred so trial waits for the async assertions
    # (missing in this copy of the file)
    return d
1842 def test_POST_upload_no_replace_queryarg(self):
1843 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1844 file=("bar.txt", self.NEWFILE_CONTENTS))
1845 d.addBoth(self.shouldFail, error.Error,
1846 "POST_upload_no_replace_queryarg",
1848 "There was already a child by that name, and you asked me "
1849 "to not replace it")
1850 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1851 d.addCallback(self.failUnlessIsBarDotTxt)
1854 def test_POST_upload_no_replace_field(self):
1855 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1856 file=("bar.txt", self.NEWFILE_CONTENTS))
1857 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1859 "There was already a child by that name, and you asked me "
1860 "to not replace it")
1861 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1862 d.addCallback(self.failUnlessIsBarDotTxt)
1865 def test_POST_upload_whendone(self):
1866 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1867 file=("new.txt", self.NEWFILE_CONTENTS))
1868 d.addBoth(self.shouldRedirect, "/THERE")
1870 d.addCallback(lambda res:
1871 self.failUnlessChildContentsAre(fn, u"new.txt",
1872 self.NEWFILE_CONTENTS))
1875 def test_POST_upload_named(self):
1877 d = self.POST(self.public_url + "/foo", t="upload",
1878 name="new.txt", file=self.NEWFILE_CONTENTS)
1879 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1880 d.addCallback(lambda res:
1881 self.failUnlessChildContentsAre(fn, u"new.txt",
1882 self.NEWFILE_CONTENTS))
1885 def test_POST_upload_named_badfilename(self):
1886 d = self.POST(self.public_url + "/foo", t="upload",
1887 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1888 d.addBoth(self.shouldFail, error.Error,
1889 "test_POST_upload_named_badfilename",
1891 "name= may not contain a slash",
1893 # make sure that nothing was added
1894 d.addCallback(lambda res:
1895 self.failUnlessNodeKeysAre(self._foo_node,
1896 [u"bar.txt", u"blockingfile",
1897 u"empty", u"n\u00fc.txt",
1901 def test_POST_FILEURL_check(self):
1902 bar_url = self.public_url + "/foo/bar.txt"
1903 d = self.POST(bar_url, t="check")
1905 self.failUnless("Healthy :" in res)
1906 d.addCallback(_check)
1907 redir_url = "http://allmydata.org/TARGET"
1908 def _check2(statuscode, target):
1909 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1910 self.failUnlessReallyEqual(target, redir_url)
1911 d.addCallback(lambda res:
1912 self.shouldRedirect2("test_POST_FILEURL_check",
1916 when_done=redir_url))
1917 d.addCallback(lambda res:
1918 self.POST(bar_url, t="check", return_to=redir_url))
1920 self.failUnless("Healthy :" in res)
1921 self.failUnless("Return to file" in res)
1922 self.failUnless(redir_url in res)
1923 d.addCallback(_check3)
1925 d.addCallback(lambda res:
1926 self.POST(bar_url, t="check", output="JSON"))
1927 def _check_json(res):
1928 data = simplejson.loads(res)
1929 self.failUnless("storage-index" in data)
1930 self.failUnless(data["results"]["healthy"])
1931 d.addCallback(_check_json)
1935 def test_POST_FILEURL_check_and_repair(self):
1936 bar_url = self.public_url + "/foo/bar.txt"
1937 d = self.POST(bar_url, t="check", repair="true")
1939 self.failUnless("Healthy :" in res)
1940 d.addCallback(_check)
1941 redir_url = "http://allmydata.org/TARGET"
1942 def _check2(statuscode, target):
1943 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1944 self.failUnlessReallyEqual(target, redir_url)
1945 d.addCallback(lambda res:
1946 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1949 t="check", repair="true",
1950 when_done=redir_url))
1951 d.addCallback(lambda res:
1952 self.POST(bar_url, t="check", return_to=redir_url))
1954 self.failUnless("Healthy :" in res)
1955 self.failUnless("Return to file" in res)
1956 self.failUnless(redir_url in res)
1957 d.addCallback(_check3)
1960 def test_POST_DIRURL_check(self):
1961 foo_url = self.public_url + "/foo/"
1962 d = self.POST(foo_url, t="check")
1964 self.failUnless("Healthy :" in res, res)
1965 d.addCallback(_check)
1966 redir_url = "http://allmydata.org/TARGET"
1967 def _check2(statuscode, target):
1968 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1969 self.failUnlessReallyEqual(target, redir_url)
1970 d.addCallback(lambda res:
1971 self.shouldRedirect2("test_POST_DIRURL_check",
1975 when_done=redir_url))
1976 d.addCallback(lambda res:
1977 self.POST(foo_url, t="check", return_to=redir_url))
1979 self.failUnless("Healthy :" in res, res)
1980 self.failUnless("Return to file/directory" in res)
1981 self.failUnless(redir_url in res)
1982 d.addCallback(_check3)
1984 d.addCallback(lambda res:
1985 self.POST(foo_url, t="check", output="JSON"))
1986 def _check_json(res):
1987 data = simplejson.loads(res)
1988 self.failUnless("storage-index" in data)
1989 self.failUnless(data["results"]["healthy"])
1990 d.addCallback(_check_json)
1994 def test_POST_DIRURL_check_and_repair(self):
1995 foo_url = self.public_url + "/foo/"
1996 d = self.POST(foo_url, t="check", repair="true")
1998 self.failUnless("Healthy :" in res, res)
1999 d.addCallback(_check)
2000 redir_url = "http://allmydata.org/TARGET"
2001 def _check2(statuscode, target):
2002 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2003 self.failUnlessReallyEqual(target, redir_url)
2004 d.addCallback(lambda res:
2005 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
2008 t="check", repair="true",
2009 when_done=redir_url))
2010 d.addCallback(lambda res:
2011 self.POST(foo_url, t="check", return_to=redir_url))
2013 self.failUnless("Healthy :" in res)
2014 self.failUnless("Return to file/directory" in res)
2015 self.failUnless(redir_url in res)
2016 d.addCallback(_check3)
2019 def wait_for_operation(self, ignored, ophandle):
2020 url = "/operations/" + ophandle
2021 url += "?t=status&output=JSON"
2024 data = simplejson.loads(res)
2025 if not data["finished"]:
2026 d = self.stall(delay=1.0)
2027 d.addCallback(self.wait_for_operation, ophandle)
2033 def get_operation_results(self, ignored, ophandle, output=None):
2034 url = "/operations/" + ophandle
2037 url += "&output=" + output
2040 if output and output.lower() == "json":
2041 return simplejson.loads(res)
2046 def test_POST_DIRURL_deepcheck_no_ophandle(self):
2047 d = self.shouldFail2(error.Error,
2048 "test_POST_DIRURL_deepcheck_no_ophandle",
2050 "slow operation requires ophandle=",
2051 self.POST, self.public_url, t="start-deep-check")
2054 def test_POST_DIRURL_deepcheck(self):
2055 def _check_redirect(statuscode, target):
2056 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2057 self.failUnless(target.endswith("/operations/123"))
2058 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
2059 self.POST, self.public_url,
2060 t="start-deep-check", ophandle="123")
2061 d.addCallback(self.wait_for_operation, "123")
2062 def _check_json(data):
2063 self.failUnlessReallyEqual(data["finished"], True)
2064 self.failUnlessReallyEqual(data["count-objects-checked"], 8)
2065 self.failUnlessReallyEqual(data["count-objects-healthy"], 8)
2066 d.addCallback(_check_json)
2067 d.addCallback(self.get_operation_results, "123", "html")
2068 def _check_html(res):
2069 self.failUnless("Objects Checked: <span>8</span>" in res)
2070 self.failUnless("Objects Healthy: <span>8</span>" in res)
2071 d.addCallback(_check_html)
2073 d.addCallback(lambda res:
2074 self.GET("/operations/123/"))
2075 d.addCallback(_check_html) # should be the same as without the slash
2077 d.addCallback(lambda res:
2078 self.shouldFail2(error.Error, "one", "404 Not Found",
2079 "No detailed results for SI bogus",
2080 self.GET, "/operations/123/bogus"))
2082 foo_si = self._foo_node.get_storage_index()
2083 foo_si_s = base32.b2a(foo_si)
2084 d.addCallback(lambda res:
2085 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2086 def _check_foo_json(res):
2087 data = simplejson.loads(res)
2088 self.failUnlessEqual(data["storage-index"], foo_si_s)
2089 self.failUnless(data["results"]["healthy"])
2090 d.addCallback(_check_foo_json)
2093 def test_POST_DIRURL_deepcheck_and_repair(self):
2094 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2095 ophandle="124", output="json", followRedirect=True)
2096 d.addCallback(self.wait_for_operation, "124")
2097 def _check_json(data):
2098 self.failUnlessReallyEqual(data["finished"], True)
2099 self.failUnlessReallyEqual(data["count-objects-checked"], 8)
2100 self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 8)
2101 self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
2102 self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
2103 self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
2104 self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
2105 self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
2106 self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 8)
2107 self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
2108 self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
2109 d.addCallback(_check_json)
2110 d.addCallback(self.get_operation_results, "124", "html")
2111 def _check_html(res):
2112 self.failUnless("Objects Checked: <span>8</span>" in res)
2114 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2115 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2116 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2118 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2119 self.failUnless("Repairs Successful: <span>0</span>" in res)
2120 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2122 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2123 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2124 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2125 d.addCallback(_check_html)
2128 def test_POST_FILEURL_bad_t(self):
2129 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2130 "POST to file: bad t=bogus",
2131 self.POST, self.public_url + "/foo/bar.txt",
def test_POST_mkdir(self): # return value?
    """t=mkdir with name= creates an empty child directory."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_initial_children(self):
    """t=mkdir-with-children reads a JSON body naming the new
    directory's initial children."""
    (newkids, caps) = self._create_initial_children()
    d = self.POST2(self.public_url +
                   "/foo?t=mkdir-with-children&name=newdir",
                   simplejson.dumps(newkids))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_immutable(self):
    """t=mkdir-immutable with an all-immutable children spec creates a
    directory holding exactly those children."""
    (newkids, caps) = self._create_immutable_children()
    d = self.POST2(self.public_url +
                   "/foo?t=mkdir-immutable&name=newdir",
                   simplejson.dumps(newkids))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    # re-fetch "newdir" before each check: callbacks get the previous
    # result, so each assertion needs a fresh node
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
    # return the Deferred so trial waits (missing in this copy)
    return d
2175 def test_POST_mkdir_immutable_bad(self):
2176 (newkids, caps) = self._create_initial_children()
2177 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2179 "needed to be immutable but was not",
2182 "/foo?t=mkdir-immutable&name=newdir",
2183 simplejson.dumps(newkids))
def test_POST_mkdir_2(self):
    """POST /foo/newdir?t=mkdir (path form, empty body) also works."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdirs_2(self):
    """Intermediate directories (bardir) are created on demand."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_no_parentdir_noredirect(self):
    """POST /uri?t=mkdir returns the new directory writecap in the body."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # parsing the response as a DIR2 writecap doubles as validation
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_no_parentdir_noredirect2(self):
    # make sure form-based arguments (as on the welcome page) still work
    d = self.POST("/uri", t="mkdir")
    def _after_mkdir(res):
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    d.addErrback(self.explain_web_error)
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_no_parentdir_redirect(self):
    """redirect_to_result=true answers with a 303 redirect pointing at
    the new directory's /uri/<cap> page."""
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_no_parentdir_redirect2(self):
    """Form-field variant of redirect_to_result=true."""
    d = self.POST("/uri", t="mkdir", redirect_to_result="true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    d.addErrback(self.explain_web_error)
    # return the Deferred so trial waits (missing in this copy)
    return d
def _make_readonly(self, u):
    """Return the read-only string form of cap `u`.

    NOTE(review): interior lines were elided in this copy; the
    None-guard below is the presumed original (get_readonly() can
    yield no read-only form) -- confirm against upstream source.
    """
    ro_uri = uri.from_string(u).get_readonly()
    if ro_uri is None:
        return None
    return ro_uri.to_string()
def _create_initial_children(self):
    """Build a JSON-ready children spec mixing immutable, mutable,
    unknown, and directory caps; also return a dict of the caps used
    so tests can assert against them.
    """
    contents, n, filecap1 = self.makefile(12)
    md1 = {"metakey1": "metavalue1"}
    filecap2 = make_mutable_file_uri()
    node3 = self.s.create_node_from_uri(make_mutable_file_uri())
    filecap3 = node3.get_readonly_uri()
    node4 = self.s.create_node_from_uri(make_mutable_file_uri())
    dircap = DirectoryNode(node4, None, None).get_uri()
    litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
    emptydircap = "URI:DIR2-LIT:"
    newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
                                           "ro_uri": self._make_readonly(filecap1),
                                           "metadata": md1, }],
               u"child-mutable": ["filenode", {"rw_uri": filecap2,
                                               "ro_uri": self._make_readonly(filecap2)}],
               u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
               u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
                                                "ro_uri": unknown_rocap}],
               u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
               u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
               u"dirchild": ["dirnode", {"rw_uri": dircap,
                                         "ro_uri": self._make_readonly(dircap)}],
               u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
               u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
               }
    # NOTE(review): the closing brace and the 'dircap' entry were
    # missing in this copy; both restored -- confirm against upstream.
    return newkids, {'filecap1': filecap1,
                     'filecap2': filecap2,
                     'filecap3': filecap3,
                     'unknown_rwcap': unknown_rwcap,
                     'unknown_rocap': unknown_rocap,
                     'unknown_immcap': unknown_immcap,
                     'dircap': dircap,
                     'litdircap': litdircap,
                     'emptydircap': emptydircap}
def _create_immutable_children(self):
    """Build a JSON-ready children spec containing only immutable and
    literal caps (suitable for t=mkdir-immutable), plus a dict of the
    caps used.
    """
    contents, n, filecap1 = self.makefile(12)
    md1 = {"metakey1": "metavalue1"}
    tnode = create_chk_filenode("immutable directory contents\n"*10)
    dnode = DirectoryNode(tnode, None, None)
    assert not dnode.is_mutable()
    immdircap = dnode.get_uri()
    litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
    emptydircap = "URI:DIR2-LIT:"
    newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
                                           "metadata": md1, }],
               u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
               u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
               u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
               u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
               }
    # closing brace restored; it was missing in this copy of the file
    return newkids, {'filecap1': filecap1,
                     'unknown_immcap': unknown_immcap,
                     'immdircap': immdircap,
                     'litdircap': litdircap,
                     'emptydircap': emptydircap}
2301 def test_POST_mkdir_no_parentdir_initial_children(self):
2302 (newkids, caps) = self._create_initial_children()
2303 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2304 def _after_mkdir(res):
2305 self.failUnless(res.startswith("URI:DIR"), res)
2306 n = self.s.create_node_from_uri(res)
2307 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2308 d2.addCallback(lambda ign:
2309 self.failUnlessROChildURIIs(n, u"child-imm",
2311 d2.addCallback(lambda ign:
2312 self.failUnlessRWChildURIIs(n, u"child-mutable",
2314 d2.addCallback(lambda ign:
2315 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2317 d2.addCallback(lambda ign:
2318 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2319 caps['unknown_rwcap']))
2320 d2.addCallback(lambda ign:
2321 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2322 caps['unknown_rocap']))
2323 d2.addCallback(lambda ign:
2324 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2325 caps['unknown_immcap']))
2326 d2.addCallback(lambda ign:
2327 self.failUnlessRWChildURIIs(n, u"dirchild",
2330 d.addCallback(_after_mkdir)
2333 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2334 # the regular /uri?t=mkdir operation is specified to ignore its body.
2335 # Only t=mkdir-with-children pays attention to it.
2336 (newkids, caps) = self._create_initial_children()
2337 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2339 "t=mkdir does not accept children=, "
2340 "try t=mkdir-with-children instead",
2341 self.POST2, "/uri?t=mkdir", # without children
2342 simplejson.dumps(newkids))
def test_POST_noparent_bad(self):
    """Unknown t= on /uri (no parent directory) is rejected with 400."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
    # return the Deferred so trial waits (missing in this copy)
    return d
2352 def test_POST_mkdir_no_parentdir_immutable(self):
2353 (newkids, caps) = self._create_immutable_children()
2354 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2355 def _after_mkdir(res):
2356 self.failUnless(res.startswith("URI:DIR"), res)
2357 n = self.s.create_node_from_uri(res)
2358 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2359 d2.addCallback(lambda ign:
2360 self.failUnlessROChildURIIs(n, u"child-imm",
2362 d2.addCallback(lambda ign:
2363 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2364 caps['unknown_immcap']))
2365 d2.addCallback(lambda ign:
2366 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2368 d2.addCallback(lambda ign:
2369 self.failUnlessROChildURIIs(n, u"dirchild-lit",
2371 d2.addCallback(lambda ign:
2372 self.failUnlessROChildURIIs(n, u"dirchild-empty",
2373 caps['emptydircap']))
2375 d.addCallback(_after_mkdir)
2378 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2379 (newkids, caps) = self._create_initial_children()
2380 d = self.shouldFail2(error.Error,
2381 "test_POST_mkdir_no_parentdir_immutable_bad",
2383 "needed to be immutable but was not",
2385 "/uri?t=mkdir-immutable",
2386 simplejson.dumps(newkids))
2389 def test_welcome_page_mkdir_button(self):
2390 # Fetch the welcome page.
2392 def _after_get_welcome_page(res):
2393 MKDIR_BUTTON_RE = re.compile(
2394 '<form action="([^"]*)" method="post".*?'
2395 '<input type="hidden" name="t" value="([^"]*)" />'
2396 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2397 '<input type="submit" value="Create a directory" />',
2399 mo = MKDIR_BUTTON_RE.search(res)
2400 formaction = mo.group(1)
2402 formaname = mo.group(3)
2403 formavalue = mo.group(4)
2404 return (formaction, formt, formaname, formavalue)
2405 d.addCallback(_after_get_welcome_page)
2406 def _after_parse_form(res):
2407 (formaction, formt, formaname, formavalue) = res
2408 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2409 d.addCallback(_after_parse_form)
2410 d.addBoth(self.shouldRedirect, None, statuscode='303')
def test_POST_mkdir_replace(self): # return value?
    """mkdir over an existing child replaces it by default, so "sub"
    must end up empty."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits (missing in this copy)
    return d
2419 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2420 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2421 d.addBoth(self.shouldFail, error.Error,
2422 "POST_mkdir_no_replace_queryarg",
2424 "There was already a child by that name, and you asked me "
2425 "to not replace it")
2426 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2427 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2430 def test_POST_mkdir_no_replace_field(self): # return value?
2431 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2433 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2435 "There was already a child by that name, and you asked me "
2436 "to not replace it")
2437 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2438 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
def test_POST_mkdir_whendone_field(self):
    """A when_done= form field redirects the browser there after mkdir."""
    d = self.POST(self.public_url + "/foo",
                  t="mkdir", name="newdir", when_done="/THERE")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_mkdir_whendone_queryarg(self):
    """when_done= also works as a query argument."""
    d = self.POST(self.public_url + "/foo?when_done=/THERE",
                  t="mkdir", name="newdir")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_bad_t(self):
    """Unknown t= on a directory yields 400 Bad Request."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to a directory with bad t=BOGUS",
                         self.POST, self.public_url + "/foo", t="BOGUS")
    # return the Deferred so trial waits (missing in this copy)
    return d
2463 def test_POST_set_children(self, command_name="set_children"):
2464 contents9, n9, newuri9 = self.makefile(9)
2465 contents10, n10, newuri10 = self.makefile(10)
2466 contents11, n11, newuri11 = self.makefile(11)
2469 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2472 "ctime": 1002777696.7564139,
2473 "mtime": 1002777696.7564139
2476 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2479 "ctime": 1002777696.7564139,
2480 "mtime": 1002777696.7564139
2483 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2486 "ctime": 1002777696.7564139,
2487 "mtime": 1002777696.7564139
2490 }""" % (newuri9, newuri10, newuri11)
2492 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2494 d = client.getPage(url, method="POST", postdata=reqbody)
2496 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2497 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2498 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2500 d.addCallback(_then)
2501 d.addErrback(self.dump_error)
def test_POST_set_children_with_hyphen(self):
    """The hyphenated t=set-children spelling behaves identically to
    the underscore form exercised by test_POST_set_children."""
    d = self.test_POST_set_children(command_name="set-children")
    return d
2507 def test_POST_link_uri(self):
2508 contents, n, newuri = self.makefile(8)
2509 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2510 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2511 d.addCallback(lambda res:
2512 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2516 def test_POST_link_uri_replace(self):
2517 contents, n, newuri = self.makefile(8)
2518 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2519 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2520 d.addCallback(lambda res:
2521 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2525 def test_POST_link_uri_unknown_bad(self):
2526 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
2527 d.addBoth(self.shouldFail, error.Error,
2528 "POST_link_uri_unknown_bad",
2530 "unknown cap in a write slot")
def test_POST_link_uri_unknown_ro_good(self):
    """Linking an unknown ro. cap into a read-only slot is allowed."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_link_uri_unknown_imm_good(self):
    """Linking an unknown imm. cap into a read-only slot is allowed."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
    # return the Deferred so trial waits (missing in this copy)
    return d
2543 def test_POST_link_uri_no_replace_queryarg(self):
2544 contents, n, newuri = self.makefile(8)
2545 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2546 name="bar.txt", uri=newuri)
2547 d.addBoth(self.shouldFail, error.Error,
2548 "POST_link_uri_no_replace_queryarg",
2550 "There was already a child by that name, and you asked me "
2551 "to not replace it")
2552 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2553 d.addCallback(self.failUnlessIsBarDotTxt)
2556 def test_POST_link_uri_no_replace_field(self):
2557 contents, n, newuri = self.makefile(8)
2558 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2559 name="bar.txt", uri=newuri)
2560 d.addBoth(self.shouldFail, error.Error,
2561 "POST_link_uri_no_replace_field",
2563 "There was already a child by that name, and you asked me "
2564 "to not replace it")
2565 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2566 d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_delete(self):
    """t=delete unlinks the named child from the directory."""
    d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
    d.addCallback(lambda res: self._foo_node.list())
    def _check(children):
        self.failIf(u"bar.txt" in children)
    d.addCallback(_check)
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_rename_file(self):
    """t=rename moves a child: old name gone, new name has the same
    contents and JSON metadata."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='wibble.txt')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_rename_file_redundant(self):
    """Renaming a file to its own name is a no-op and must not lose it."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='bar.txt')
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    # return the Deferred so trial waits (missing in this copy)
    return d
def test_POST_rename_file_replace(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"empty"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    # return the Deferred so trial waits (missing in this copy)
    return d
2615 def test_POST_rename_file_no_replace_queryarg(self):
2616 # rename a file and replace a directory with it
2617 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2618 from_name="bar.txt", to_name='empty')
2619 d.addBoth(self.shouldFail, error.Error,
2620 "POST_rename_file_no_replace_queryarg",
2622 "There was already a child by that name, and you asked me "
2623 "to not replace it")
2624 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2625 d.addCallback(self.failUnlessIsEmptyJSON)
2628 def test_POST_rename_file_no_replace_field(self):
2629 # rename a file and replace a directory with it
2630 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2631 from_name="bar.txt", to_name='empty')
2632 d.addBoth(self.shouldFail, error.Error,
2633 "POST_rename_file_no_replace_field",
2635 "There was already a child by that name, and you asked me "
2636 "to not replace it")
2637 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2638 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that `res` is the t=json rendering of an empty dirnode."""
    data = simplejson.loads(res)
    nodetype = data[0]
    self.failUnlessEqual(nodetype, "dirnode", data)
    children = data[1]["children"]
    self.failUnlessReallyEqual(len(children), 0)
2646 def test_POST_rename_file_slash_fail(self):
2647 d = self.POST(self.public_url + "/foo", t="rename",
2648 from_name="bar.txt", to_name='kirk/spock.txt')
2649 d.addBoth(self.shouldFail, error.Error,
2650 "test_POST_rename_file_slash_fail",
2652 "to_name= may not contain a slash",
2654 d.addCallback(lambda res:
2655 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
def test_POST_rename_dir(self):
    """Directories can be renamed with t=rename too."""
    d = self.POST(self.public_url, t="rename",
                  from_name="foo", to_name='plunk')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self.public_root, u"foo"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self.public_root, u"plunk"))
    d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
    d.addCallback(self.failUnlessIsFooJSON)
    # return the Deferred so trial waits (missing in this copy)
    return d
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """ If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished with
    that HTTP status code."""
    if not isinstance(res, failure.Failure):
        if target is None:
            to_where = "somewhere"
        else:
            to_where = "to " + target
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    redirect = res.value
    if statuscode is not None:
        self.failUnlessReallyEqual(redirect.status, statuscode,
                                   "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        self.failUnlessReallyEqual(redirect.location,
                                   self.webish_url + target,
                                   "%s: wrong target" % which)
    return redirect.location
# GET /uri?uri=$CAP must redirect to /uri/$CAP, preserving extra query
# args (filename=, t=json); following the redirect must serve the file
# or directory contents.
# NOTE(review): the initial `d = self.GET(base)` line (orig. 2693) is
# missing from this excerpt — confirm against the full file.
2690 def test_GET_URI_form(self):
2691 base = "/uri?uri=%s" % self._bar_txt_uri
2692 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2693 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2694 d.addBoth(self.shouldRedirect, targetbase)
2695 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2696 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2697 d.addCallback(lambda res: self.GET(base+"&t=json"))
2698 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2699 d.addCallback(self.log, "about to get file by uri")
2700 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2701 d.addCallback(self.failUnlessIsBarDotTxt)
2702 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2703 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2704 followRedirect=True))
2705 d.addCallback(self.failUnlessIsFooJSON)
2706 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= query argument must produce 400 Bad Request.
# NOTE(review): the call arguments to shouldFail2 (the GET itself) are
# missing from this excerpt — confirm against the full file.
2710 def test_GET_URI_form_bad(self):
2711 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2712 "400 Bad Request", "GET /uri requires uri=",
# t=rename-form must render an HTML form pre-filled with the child name
# and a when_done field pointing back at the directory.
# NOTE(review): the `def _check(res):` header line is missing from this
# excerpt — confirm against the full file.
2716 def test_GET_rename_form(self):
2717 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2718 followRedirect=True)
2720 self.failUnless('name="when_done" value="."' in res, res)
2721 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2722 d.addCallback(_check)
# Pass-through callback used in Deferred chains: optionally prints a
# progress message (debug print is commented out) and forwards `res`.
2725 def log(self, res, msg):
2726 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/$FILECAP must serve the file contents; extra filename= and
# save=true query args must not change the body returned.
# NOTE(review): the initial `d = self.GET(base)` line appears missing
# from this excerpt — confirm against the full file.
2730 def test_GET_URI_URL(self):
2731 base = "/uri/%s" % self._bar_txt_uri
2733 d.addCallback(self.failUnlessIsBarDotTxt)
2734 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2735 d.addCallback(self.failUnlessIsBarDotTxt)
2736 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2737 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/$DIRCAP?t=json must return the directory's JSON rendering.
# NOTE(review): the `d = self.GET(base)` line appears missing from this
# excerpt — confirm against the full file.
2740 def test_GET_URI_URL_dir(self):
2741 base = "/uri/%s?t=json" % self._foo_uri
2743 d.addCallback(self.failUnlessIsFooJSON)
# Fetching a cap whose shares are unrecoverable must yield HTTP 410 GONE
# with a NotEnoughSharesError explanation.
2747 def test_GET_URI_URL_missing(self):
2748 base = "/uri/%s" % self._bad_file_uri
2749 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2750 http.GONE, None, "NotEnoughSharesError",
2751 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2752 # here? we must arrange for a download to fail after target.open()
2753 # has been called, and then inspect the response to see that it is
2754 # shorter than we expected.
# PUT dir?t=uri replaces an existing child directory with a freshly
# created (empty) one; the response body must echo the new URI, and the
# parent's child entry must now point at it.
# NOTE(review): the `def _made_dir(dn):` header and the closing lines of
# the failUnlessRWChildURIIs call appear missing from this excerpt.
2757 def test_PUT_DIRURL_uri(self):
2758 d = self.s.create_dirnode()
2760 new_uri = dn.get_uri()
2761 # replace /foo with a new (empty) directory
2762 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2763 d.addCallback(lambda res:
2764 self.failUnlessReallyEqual(res.strip(), new_uri))
2765 d.addCallback(lambda res:
2766 self.failUnlessRWChildURIIs(self.public_root,
2770 d.addCallback(_made_dir)
# PUT dir?t=uri&replace=false on an existing child must 409 Conflict and
# leave the original child URI untouched.
# NOTE(review): interior lines (the _made_dir header and some call
# arguments) appear missing from this excerpt.
2773 def test_PUT_DIRURL_uri_noreplace(self):
2774 d = self.s.create_dirnode()
2776 new_uri = dn.get_uri()
2777 # replace /foo with a new (empty) directory, but ask that
2778 # replace=false, so it should fail
2779 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2780 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2782 self.public_url + "/foo?t=uri&replace=false",
2784 d.addCallback(lambda res:
2785 self.failUnlessRWChildURIIs(self.public_root,
2789 d.addCallback(_made_dir)
# PUT dir?t=BOGUS must 400 Bad Request; the directory child must be
# unchanged afterwards.
2792 def test_PUT_DIRURL_bad_t(self):
2793 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2794 "400 Bad Request", "PUT to a directory",
2795 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2796 d.addCallback(lambda res:
2797 self.failUnlessRWChildURIIs(self.public_root,
# PUT newchild?t=uri attaches a pre-uploaded filecap as a new child; the
# response echoes the cap and the child's contents must match.
2802 def test_PUT_NEWFILEURL_uri(self):
2803 contents, n, new_uri = self.makefile(8)
2804 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2805 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
2806 d.addCallback(lambda res:
2807 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# Same as test_PUT_NEWFILEURL_uri but targeting an existing child name
# ("bar.txt"): the default is to replace it.
2811 def test_PUT_NEWFILEURL_uri_replace(self):
2812 contents, n, new_uri = self.makefile(8)
2813 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2814 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
2815 d.addCallback(lambda res:
2816 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# PUT existing-child?t=uri&replace=false must refuse with the standard
# "already a child by that name" error.
2820 def test_PUT_NEWFILEURL_uri_no_replace(self):
2821 contents, n, new_uri = self.makefile(8)
2822 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2823 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2825 "There was already a child by that name, and you asked me "
2826 "to not replace it")
# Linking an unknown (future-format) *writecap* via t=uri must be
# rejected: unknown caps may not go in a write slot.
2829 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
2830 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
2831 d.addBoth(self.shouldFail, error.Error,
2832 "POST_put_uri_unknown_bad",
2834 "unknown cap in a write slot")
# An unknown cap with a "ro." prefix is acceptable as a read-only child.
2837 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
2838 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
2839 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2840 u"put-future-ro.txt")
# An unknown cap with an "imm." prefix is acceptable as an immutable
# (read-only) child.
2843 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
2844 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
2845 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2846 u"put-future-imm.txt")
# PUT /uri with a body uploads an unlinked immutable file; the returned
# cap must be present in the fake node store and fetchable via GET.
# NOTE(review): the `def _check(uri):` / `def _check2(res):` header lines
# appear missing from this excerpt — confirm against the full file.
2849 def test_PUT_NEWFILE_URI(self):
2850 file_contents = "New file contents here\n"
2851 d = self.PUT("/uri", file_contents)
2853 assert isinstance(uri, str), uri
2854 self.failUnless(uri in FakeCHKFileNode.all_contents)
2855 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
2857 return self.GET("/uri/%s" % uri)
2858 d.addCallback(_check)
2860 self.failUnlessReallyEqual(res, file_contents)
2861 d.addCallback(_check2)
# Same as test_PUT_NEWFILE_URI but with an explicit mutable=false query
# arg: still produces an immutable CHK file.
# NOTE(review): the `def _check(uri):` / `def _check2(res):` header lines
# appear missing from this excerpt.
2864 def test_PUT_NEWFILE_URI_not_mutable(self):
2865 file_contents = "New file contents here\n"
2866 d = self.PUT("/uri?mutable=false", file_contents)
2868 assert isinstance(uri, str), uri
2869 self.failUnless(uri in FakeCHKFileNode.all_contents)
2870 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
2872 return self.GET("/uri/%s" % uri)
2873 d.addCallback(_check)
2875 self.failUnlessReallyEqual(res, file_contents)
2876 d.addCallback(_check2)
# PUT /uri?t=bogus must fail, listing the only accepted operations.
2879 def test_PUT_NEWFILE_URI_only_PUT(self):
2880 d = self.PUT("/uri?t=bogus", "")
2881 d.addBoth(self.shouldFail, error.Error,
2882 "PUT_NEWFILE_URI_only_PUT",
2884 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true creates an unlinked mutable (SSK) file: the
# returned cap must be a URI:SSK: writecap, the storage index must exist
# in the fake mutable store, and both the node API and the web GET must
# return the original contents.
# NOTE(review): the `def _check2(data):` / `def _check3(res):` header
# lines appear missing from this excerpt.
2887 def test_PUT_NEWFILE_URI_mutable(self):
2888 file_contents = "New file contents here\n"
2889 d = self.PUT("/uri?mutable=true", file_contents)
2890 def _check1(filecap):
2891 filecap = filecap.strip()
2892 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
# stash the cap for the later web fetch in _check2
2893 self.filecap = filecap
2894 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2895 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
2896 n = self.s.create_node_from_uri(filecap)
2897 return n.download_best_version()
2898 d.addCallback(_check1)
2900 self.failUnlessReallyEqual(data, file_contents)
2901 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2902 d.addCallback(_check2)
2904 self.failUnlessReallyEqual(res, file_contents)
2905 d.addCallback(_check3)
# PUT /uri?t=mkdir creates an unlinked directory; the returned dircap
# must resolve to a node with no children, and its t=json rendering must
# be the empty-dirnode JSON.
# NOTE(review): the `def _check(uri):` header and the `return d2` line
# appear missing from this excerpt.
2908 def test_PUT_mkdir(self):
2909 d = self.PUT("/uri?t=mkdir", "")
2911 n = self.s.create_node_from_uri(uri.strip())
2912 d2 = self.failUnlessNodeKeysAre(n, [])
2913 d2.addCallback(lambda res:
2914 self.GET("/uri/%s?t=json" % uri))
2916 d.addCallback(_check)
2917 d.addCallback(self.failUnlessIsEmptyJSON)
# POST t=check on a child: with fake filenodes the results are mostly
# placeholders, so this only exercises the code path (see TODO below).
2920 def test_POST_check(self):
2921 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2923 # this returns a string form of the results, which are probably
2924 # None since we're using fake filenodes.
2925 # TODO: verify that the check actually happened, by changing
2926 # FakeCHKFileNode to count how many times .check() has been
2929 d.addCallback(_done)
# An unsupported HTTP method on a file URL must produce 501 Not
# Implemented with an explanatory message.
2932 def test_bad_method(self):
2933 url = self.webish_url + self.public_url + "/foo/bar.txt"
2934 d = self.shouldHTTPError("test_bad_method",
2935 501, "Not Implemented",
2936 "I don't know how to treat a BOGUS request.",
2937 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri resource is not supported: 501 Not Implemented.
2940 def test_short_url(self):
2941 url = self.webish_url + "/uri"
2942 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2943 "I don't know how to treat a DELETE request.",
2944 client.getPage, url, method="DELETE")
# Querying status for a nonexistent operation handle must 404.
2947 def test_ophandle_bad(self):
2948 url = self.webish_url + "/operations/bogus?t=status"
2949 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2950 "unknown/expired handle 'bogus'",
2951 client.getPage, url)
# Start a manifest operation under ophandle=128, then POST t=cancel to
# it: the monitor must report cancelled and the handle must be forgotten
# (subsequent status queries 404).
# NOTE(review): the `def _check1(res):` / `def _check2(res):` headers,
# `return d`, and the getPage argument of the final shouldHTTPError call
# appear missing from this excerpt.
2954 def test_ophandle_cancel(self):
2955 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2956 followRedirect=True)
2957 d.addCallback(lambda ignored:
2958 self.GET("/operations/128?t=status&output=JSON"))
2960 data = simplejson.loads(res)
2961 self.failUnless("finished" in data, res)
# grab the Monitor object before cancelling so we can inspect it after
# the handle has been forgotten
2962 monitor = self.ws.root.child_operations.handles["128"][0]
2963 d = self.POST("/operations/128?t=cancel&output=JSON")
2965 data = simplejson.loads(res)
2966 self.failUnless("finished" in data, res)
2967 # t=cancel causes the handle to be forgotten
2968 self.failUnless(monitor.is_cancelled())
2969 d.addCallback(_check2)
2971 d.addCallback(_check1)
2972 d.addCallback(lambda ignored:
2973 self.shouldHTTPError("test_ophandle_cancel",
2974 404, "404 Not Found",
2975 "unknown/expired handle '128'",
2977 "/operations/128?t=status&output=JSON"))
# retain-for= controls handle lifetime: re-fetching status with
# retain-for=0 shortens it, so after advancing the (fake) clock the
# handle must be expired (404).
2980 def test_ophandle_retainfor(self):
2981 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2982 followRedirect=True)
2983 d.addCallback(lambda ignored:
2984 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2986 data = simplejson.loads(res)
2987 self.failUnless("finished" in data, res)
2988 d.addCallback(_check1)
2989 # the retain-for=0 will cause the handle to be expired very soon
2990 d.addCallback(lambda ign:
2991 self.clock.advance(2.0))
2992 d.addCallback(lambda ignored:
2993 self.shouldHTTPError("test_ophandle_retainfor",
2994 404, "404 Not Found",
2995 "unknown/expired handle '129'",
2997 "/operations/129?t=status&output=JSON"))
# release-after-complete=true on a status fetch of a finished operation
# must expire the handle immediately: the next status query 404s.
3000 def test_ophandle_release_after_complete(self):
3001 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
3002 followRedirect=True)
3003 d.addCallback(self.wait_for_operation, "130")
3004 d.addCallback(lambda ignored:
3005 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
3006 # the release-after-complete=true will cause the handle to be expired
3007 d.addCallback(lambda ignored:
3008 self.shouldHTTPError("test_ophandle_release_after_complete",
3009 404, "404 Not Found",
3010 "unknown/expired handle '130'",
3012 "/operations/130?t=status&output=JSON"))
# Uncollected ophandles (302 redirect received but never followed) must
# survive until just under 4 days (96h - 1s) and be gone at 96h, driven
# by the fake clock.
3015 def test_uncollected_ophandle_expiration(self):
3016 # uncollected ophandles should expire after 4 days
3017 def _make_uncollected_ophandle(ophandle):
3018 d = self.POST(self.public_url +
3019 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3020 followRedirect=False)
3021 # When we start the operation, the webapi server will want
3022 # to redirect us to the page for the ophandle, so we get
3023 # confirmation that the operation has started. If the
3024 # manifest operation has finished by the time we get there,
3025 # following that redirect (by setting followRedirect=True
3026 # above) has the side effect of collecting the ophandle that
3027 # we've just created, which means that we can't use the
3028 # ophandle to test the uncollected timeout anymore. So,
3029 # instead, catch the 302 here and don't follow it.
3030 d.addBoth(self.should302, "uncollected_ophandle_creation")
3032 # Create an ophandle, don't collect it, then advance the clock by
3033 # 4 days - 1 second and make sure that the ophandle is still there.
3034 d = _make_uncollected_ophandle(131)
3035 d.addCallback(lambda ign:
3036 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
3037 d.addCallback(lambda ign:
3038 self.GET("/operations/131?t=status&output=JSON"))
3040 data = simplejson.loads(res)
3041 self.failUnless("finished" in data, res)
3042 d.addCallback(_check1)
3043 # Create an ophandle, don't collect it, then try to collect it
3044 # after 4 days. It should be gone.
3045 d.addCallback(lambda ign:
3046 _make_uncollected_ophandle(132))
3047 d.addCallback(lambda ign:
3048 self.clock.advance(96*60*60))
3049 d.addCallback(lambda ign:
3050 self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
3051 404, "404 Not Found",
3052 "unknown/expired handle '132'",
3054 "/operations/132?t=status&output=JSON"))
# Collected ophandles (redirect followed at creation time) expire after
# 1 day: still present at 24h - 1s, gone at 24h, using the fake clock.
3057 def test_collected_ophandle_expiration(self):
3058 # collected ophandles should expire after 1 day
3059 def _make_collected_ophandle(ophandle):
3060 d = self.POST(self.public_url +
3061 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3062 followRedirect=True)
3063 # By following the initial redirect, we collect the ophandle
3064 # we've just created.
3066 # Create a collected ophandle, then collect it after 23 hours
3067 # and 59 seconds to make sure that it is still there.
3068 d = _make_collected_ophandle(133)
3069 d.addCallback(lambda ign:
3070 self.clock.advance((24*60*60) - 1))
3071 d.addCallback(lambda ign:
3072 self.GET("/operations/133?t=status&output=JSON"))
3074 data = simplejson.loads(res)
3075 self.failUnless("finished" in data, res)
3076 d.addCallback(_check1)
3077 # Create another uncollected ophandle, then try to collect it
3078 # after 24 hours to make sure that it is gone.
3079 d.addCallback(lambda ign:
3080 _make_collected_ophandle(134))
3081 d.addCallback(lambda ign:
3082 self.clock.advance(24*60*60))
3083 d.addCallback(lambda ign:
3084 self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
3085 404, "404 Not Found",
3086 "unknown/expired handle '134'",
3088 "/operations/134?t=status&output=JSON"))
# POST /report_incident must acknowledge the report in its HTML body.
3091 def test_incident(self):
3092 d = self.POST("/report_incident", details="eek")
3094 self.failUnless("Thank you for your report!" in res, res)
3095 d.addCallback(_done)
# Files placed under the node's static directory must be served verbatim
# at /static/<subpath>.
# NOTE(review): the f.write("hello")/f.close() lines and the _check
# header appear missing from this excerpt.
3098 def test_static(self):
3099 webdir = os.path.join(self.staticdir, "subdir")
3100 fileutil.make_dirs(webdir)
3101 f = open(os.path.join(webdir, "hello.txt"), "wb")
3105 d = self.GET("/static/subdir/hello.txt")
3107 self.failUnlessReallyEqual(res, "hello")
3108 d.addCallback(_check)
# Unit tests for small helpers in allmydata.web.common and .status:
# template loading, query-arg parsing, and human-readable abbreviations.
3112 class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
3113 def test_load_file(self):
3114 # This will raise an exception unless a well-formed XML file is found under that name.
3115 common.getxmlfile('directory.xhtml').load()
# parse_replace_arg maps "true"/"false"/"only-files" strings to their
# canonical values and rejects anything else with an AssertionError.
3117 def test_parse_replace_arg(self):
3118 self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
3119 self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
3120 self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
3122 self.shouldFail(AssertionError, "test_parse_replace_arg", "",
3123 common.parse_replace_arg, "only_fles")
# abbreviate_time picks s/ms/us units; None renders as "".
3125 def test_abbreviate_time(self):
3126 self.failUnlessReallyEqual(common.abbreviate_time(None), "")
3127 self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
3128 self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
3129 self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
3130 self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
# negative values fall through to the smallest unit
3131 self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
# abbreviate_rate uses decimal (SI) Bps units.
3133 def test_abbreviate_rate(self):
3134 self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
3135 self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
3136 self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
3137 self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
# abbreviate_size uses decimal (SI) byte units.
3139 def test_abbreviate_size(self):
3140 self.failUnlessReallyEqual(common.abbreviate_size(None), "")
3141 self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
3142 self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
3143 self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
3144 self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")
# status.plural returns "" or "s" depending on count / sequence length.
# NOTE(review): the `def convert(s):` / `def convert2(s):` header lines
# appear missing from this excerpt.
3146 def test_plural(self):
3148 return "%d second%s" % (s, status.plural(s))
3149 self.failUnlessReallyEqual(convert(0), "0 seconds")
3150 self.failUnlessReallyEqual(convert(1), "1 second")
3151 self.failUnlessReallyEqual(convert(2), "2 seconds")
3153 return "has share%s: %s" % (status.plural(s), ",".join(s))
3154 self.failUnlessReallyEqual(convert2([]), "has shares: ")
3155 self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
3156 self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
3159 class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
# Helper: POST <fileurl>?<args> for the stashed file named `which`,
# optionally via a specific client. The leading `ign` lets this be used
# directly as a Deferred callback via d.addCallback(self.CHECK, ...).
3161 def CHECK(self, ign, which, args, clientnum=0):
3162 fileurl = self.fileurls[which]
3163 url = fileurl + "?" + args
3164 return self.GET(url, method="POST", clientnum=clientnum)
# End-to-end t=check test on a real grid: uploads a healthy file, a
# "sick" file (1 share deleted), a "dead" file (9 of 10 shares deleted),
# a corrupted mutable file, a literal file, and a small immutable dir,
# then verifies both the HTML and JSON check results for each.
# NOTE(review): several interior lines (set_up_grid/DATA setup, dict
# initializers, corrupt_share call, return d) appear missing from this
# excerpt — confirm against the full file.
3166 def test_filecheck(self):
3167 self.basedir = "web/Grid/filecheck"
3169 c0 = self.g.clients[0]
3172 d = c0.upload(upload.Data(DATA, convergence=""))
3173 def _stash_uri(ur, which):
3174 self.uris[which] = ur.uri
3175 d.addCallback(_stash_uri, "good")
3176 d.addCallback(lambda ign:
3177 c0.upload(upload.Data(DATA+"1", convergence="")))
3178 d.addCallback(_stash_uri, "sick")
3179 d.addCallback(lambda ign:
3180 c0.upload(upload.Data(DATA+"2", convergence="")))
3181 d.addCallback(_stash_uri, "dead")
3182 def _stash_mutable_uri(n, which):
3183 self.uris[which] = n.get_uri()
3184 assert isinstance(self.uris[which], str)
3185 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3186 d.addCallback(_stash_mutable_uri, "corrupt")
3187 d.addCallback(lambda ign:
3188 c0.upload(upload.Data("literal", convergence="")))
3189 d.addCallback(_stash_uri, "small")
3190 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3191 d.addCallback(_stash_mutable_uri, "smalldir")
# build /uri/<quoted-cap> URLs for every stashed cap
3193 def _compute_fileurls(ignored):
3195 for which in self.uris:
3196 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3197 d.addCallback(_compute_fileurls)
# damage the shares: delete 1 of "sick", 9 of "dead", corrupt 1 of
# "corrupt" via the debug corrupt-share tool
3199 def _clobber_shares(ignored):
3200 good_shares = self.find_uri_shares(self.uris["good"])
3201 self.failUnlessReallyEqual(len(good_shares), 10)
3202 sick_shares = self.find_uri_shares(self.uris["sick"])
3203 os.unlink(sick_shares[0][2])
3204 dead_shares = self.find_uri_shares(self.uris["dead"])
3205 for i in range(1, 10):
3206 os.unlink(dead_shares[i][2])
3207 c_shares = self.find_uri_shares(self.uris["corrupt"])
3208 cso = CorruptShareOptions()
3209 cso.stdout = StringIO()
3210 cso.parseOptions([c_shares[0][2]])
3212 d.addCallback(_clobber_shares)
# healthy file: HTML and JSON results
3214 d.addCallback(self.CHECK, "good", "t=check")
3215 def _got_html_good(res):
3216 self.failUnless("Healthy" in res, res)
3217 self.failIf("Not Healthy" in res, res)
3218 d.addCallback(_got_html_good)
3219 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
3220 def _got_html_good_return_to(res):
3221 self.failUnless("Healthy" in res, res)
3222 self.failIf("Not Healthy" in res, res)
3223 self.failUnless('<a href="somewhere">Return to file'
3225 d.addCallback(_got_html_good_return_to)
3226 d.addCallback(self.CHECK, "good", "t=check&output=json")
3227 def _got_json_good(res):
3228 r = simplejson.loads(res)
3229 self.failUnlessEqual(r["summary"], "Healthy")
3230 self.failUnless(r["results"]["healthy"])
3231 self.failIf(r["results"]["needs-rebalancing"])
3232 self.failUnless(r["results"]["recoverable"])
3233 d.addCallback(_got_json_good)
# literal files have no shares, so they are always healthy
3235 d.addCallback(self.CHECK, "small", "t=check")
3236 def _got_html_small(res):
3237 self.failUnless("Literal files are always healthy" in res, res)
3238 self.failIf("Not Healthy" in res, res)
3239 d.addCallback(_got_html_small)
3240 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
3241 def _got_html_small_return_to(res):
3242 self.failUnless("Literal files are always healthy" in res, res)
3243 self.failIf("Not Healthy" in res, res)
3244 self.failUnless('<a href="somewhere">Return to file'
3246 d.addCallback(_got_html_small_return_to)
3247 d.addCallback(self.CHECK, "small", "t=check&output=json")
3248 def _got_json_small(res):
3249 r = simplejson.loads(res)
3250 self.failUnlessEqual(r["storage-index"], "")
3251 self.failUnless(r["results"]["healthy"])
3252 d.addCallback(_got_json_small)
# small immutable dirnodes are literal too
3254 d.addCallback(self.CHECK, "smalldir", "t=check")
3255 def _got_html_smalldir(res):
3256 self.failUnless("Literal files are always healthy" in res, res)
3257 self.failIf("Not Healthy" in res, res)
3258 d.addCallback(_got_html_smalldir)
3259 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
3260 def _got_json_smalldir(res):
3261 r = simplejson.loads(res)
3262 self.failUnlessEqual(r["storage-index"], "")
3263 self.failUnless(r["results"]["healthy"])
3264 d.addCallback(_got_json_smalldir)
# sick: 9 of 10 shares remain — unhealthy but recoverable
3266 d.addCallback(self.CHECK, "sick", "t=check")
3267 def _got_html_sick(res):
3268 self.failUnless("Not Healthy" in res, res)
3269 d.addCallback(_got_html_sick)
3270 d.addCallback(self.CHECK, "sick", "t=check&output=json")
3271 def _got_json_sick(res):
3272 r = simplejson.loads(res)
3273 self.failUnlessEqual(r["summary"],
3274 "Not Healthy: 9 shares (enc 3-of-10)")
3275 self.failIf(r["results"]["healthy"])
3276 self.failIf(r["results"]["needs-rebalancing"])
3277 self.failUnless(r["results"]["recoverable"])
3278 d.addCallback(_got_json_sick)
# dead: only 1 of 10 shares remains (k=3) — not recoverable
3280 d.addCallback(self.CHECK, "dead", "t=check")
3281 def _got_html_dead(res):
3282 self.failUnless("Not Healthy" in res, res)
3283 d.addCallback(_got_html_dead)
3284 d.addCallback(self.CHECK, "dead", "t=check&output=json")
3285 def _got_json_dead(res):
3286 r = simplejson.loads(res)
3287 self.failUnlessEqual(r["summary"],
3288 "Not Healthy: 1 shares (enc 3-of-10)")
3289 self.failIf(r["results"]["healthy"])
3290 self.failIf(r["results"]["needs-rebalancing"])
3291 self.failIf(r["results"]["recoverable"])
3292 d.addCallback(_got_json_dead)
# corrupt: needs verify=true for the checker to notice the bad share
3294 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
3295 def _got_html_corrupt(res):
3296 self.failUnless("Not Healthy! : Unhealthy" in res, res)
3297 d.addCallback(_got_html_corrupt)
3298 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
3299 def _got_json_corrupt(res):
3300 r = simplejson.loads(res)
3301 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
3303 self.failIf(r["results"]["healthy"])
3304 self.failUnless(r["results"]["recoverable"])
3305 self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
3306 self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
3307 d.addCallback(_got_json_corrupt)
3309 d.addErrback(self.explain_web_error)
# t=check&repair=true HTML rendering: a healthy file needs no repair, a
# sick file (one share deleted) and a corrupted mutable file must both
# report "Repair successful". Repair of a dead file is intentionally
# left commented out (reporting semantics unresolved).
# NOTE(review): several interior lines (grid setup, dict initializers,
# corrupt_share call, return d) appear missing from this excerpt.
3312 def test_repair_html(self):
3313 self.basedir = "web/Grid/repair_html"
3315 c0 = self.g.clients[0]
3318 d = c0.upload(upload.Data(DATA, convergence=""))
3319 def _stash_uri(ur, which):
3320 self.uris[which] = ur.uri
3321 d.addCallback(_stash_uri, "good")
3322 d.addCallback(lambda ign:
3323 c0.upload(upload.Data(DATA+"1", convergence="")))
3324 d.addCallback(_stash_uri, "sick")
3325 d.addCallback(lambda ign:
3326 c0.upload(upload.Data(DATA+"2", convergence="")))
3327 d.addCallback(_stash_uri, "dead")
3328 def _stash_mutable_uri(n, which):
3329 self.uris[which] = n.get_uri()
3330 assert isinstance(self.uris[which], str)
3331 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3332 d.addCallback(_stash_mutable_uri, "corrupt")
3334 def _compute_fileurls(ignored):
3336 for which in self.uris:
3337 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3338 d.addCallback(_compute_fileurls)
# same damage pattern as test_filecheck: delete/corrupt shares
3340 def _clobber_shares(ignored):
3341 good_shares = self.find_uri_shares(self.uris["good"])
3342 self.failUnlessReallyEqual(len(good_shares), 10)
3343 sick_shares = self.find_uri_shares(self.uris["sick"])
3344 os.unlink(sick_shares[0][2])
3345 dead_shares = self.find_uri_shares(self.uris["dead"])
3346 for i in range(1, 10):
3347 os.unlink(dead_shares[i][2])
3348 c_shares = self.find_uri_shares(self.uris["corrupt"])
3349 cso = CorruptShareOptions()
3350 cso.stdout = StringIO()
3351 cso.parseOptions([c_shares[0][2]])
3353 d.addCallback(_clobber_shares)
3355 d.addCallback(self.CHECK, "good", "t=check&repair=true")
3356 def _got_html_good(res):
3357 self.failUnless("Healthy" in res, res)
3358 self.failIf("Not Healthy" in res, res)
3359 self.failUnless("No repair necessary" in res, res)
3360 d.addCallback(_got_html_good)
3362 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
3363 def _got_html_sick(res):
3364 self.failUnless("Healthy : healthy" in res, res)
3365 self.failIf("Not Healthy" in res, res)
3366 self.failUnless("Repair successful" in res, res)
3367 d.addCallback(_got_html_sick)
3369 # repair of a dead file will fail, of course, but it isn't yet
3370 # clear how this should be reported. Right now it shows up as
3373 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
3374 #def _got_html_dead(res):
3376 # self.failUnless("Healthy : healthy" in res, res)
3377 # self.failIf("Not Healthy" in res, res)
3378 # self.failUnless("No repair necessary" in res, res)
3379 #d.addCallback(_got_html_dead)
3381 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
3382 def _got_html_corrupt(res):
3383 self.failUnless("Healthy : Healthy" in res, res)
3384 self.failIf("Not Healthy" in res, res)
3385 self.failUnless("Repair successful" in res, res)
3386 d.addCallback(_got_html_corrupt)
3388 d.addErrback(self.explain_web_error)
# t=check&repair=true&output=json: after deleting one share of a file,
# the JSON report must show repair attempted and successful, with
# unhealthy pre-repair results and healthy post-repair results.
# NOTE(review): grid setup lines and `return d` appear missing from this
# excerpt.
3391 def test_repair_json(self):
3392 self.basedir = "web/Grid/repair_json"
3394 c0 = self.g.clients[0]
3397 d = c0.upload(upload.Data(DATA+"1", convergence=""))
3398 def _stash_uri(ur, which):
3399 self.uris[which] = ur.uri
3400 d.addCallback(_stash_uri, "sick")
3402 def _compute_fileurls(ignored):
3404 for which in self.uris:
3405 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3406 d.addCallback(_compute_fileurls)
3408 def _clobber_shares(ignored):
3409 sick_shares = self.find_uri_shares(self.uris["sick"])
3410 os.unlink(sick_shares[0][2])
3411 d.addCallback(_clobber_shares)
3413 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
3414 def _got_json_sick(res):
3415 r = simplejson.loads(res)
3416 self.failUnlessReallyEqual(r["repair-attempted"], True)
3417 self.failUnlessReallyEqual(r["repair-successful"], True)
3418 self.failUnlessEqual(r["pre-repair-results"]["summary"],
3419 "Not Healthy: 9 shares (enc 3-of-10)")
3420 self.failIf(r["pre-repair-results"]["results"]["healthy"])
3421 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
3422 self.failUnless(r["post-repair-results"]["results"]["healthy"])
3423 d.addCallback(_got_json_sick)
3425 d.addErrback(self.explain_web_error)
# Web rendering of unknown (future-format) caps: a directory containing
# an UnknownNode child must tolerate it in HTML listings, t=json, and
# t=info, both in read-write and read-only views. With immutable=True
# the child is an imm. cap inside an immutable dirnode (also exercised
# by test_immutable_unknown).
# NOTE(review): many interior lines (set_up_grid, the if/else around the
# immutable branch, _check-function headers, return d) appear missing
# from this excerpt — confirm against the full file.
3428 def test_unknown(self, immutable=False):
3429 self.basedir = "web/Grid/unknown"
3431 self.basedir = "web/Grid/unknown-immutable"
3434 c0 = self.g.clients[0]
3438 # the future cap format may contain slashes, which must be tolerated
3439 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
3443 name = u"future-imm"
3444 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
3445 d = c0.create_immutable_dirnode({name: (future_node, {})})
3448 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3449 d = c0.create_dirnode()
3451 def _stash_root_and_create_file(n):
3453 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
3454 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
3456 return self.rootnode.set_node(name, future_node)
3457 d.addCallback(_stash_root_and_create_file)
3459 # make sure directory listing tolerates unknown nodes
3460 d.addCallback(lambda ign: self.GET(self.rooturl))
3461 def _check_directory_html(res, expected_type_suffix):
# the listing shows "?<suffix>" as the type of an unknown child
3462 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
3463 '<td>%s</td>' % (expected_type_suffix, str(name)),
3465 self.failUnless(re.search(pattern, res), res)
3466 # find the More Info link for name, should be relative
3467 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3468 info_url = mo.group(1)
3469 self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
3471 d.addCallback(_check_directory_html, "-IMM")
3473 d.addCallback(_check_directory_html, "")
3475 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3476 def _check_directory_json(res, expect_rw_uri):
3477 data = simplejson.loads(res)
3478 self.failUnlessEqual(data[0], "dirnode")
3479 f = data[1]["children"][name]
3480 self.failUnlessEqual(f[0], "unknown")
3482 self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
3484 self.failIfIn("rw_uri", f[1])
3486 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
3488 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
3489 self.failUnless("metadata" in f[1])
3490 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
# t=info page for an unknown node: only the expected caps appear, and
# none of the file/dir operation sections are rendered
3492 def _check_info(res, expect_rw_uri, expect_ro_uri):
3493 self.failUnlessIn("Object Type: <span>unknown</span>", res)
3495 self.failUnlessIn(unknown_rwcap, res)
3498 self.failUnlessIn(unknown_immcap, res)
3500 self.failUnlessIn(unknown_rocap, res)
3502 self.failIfIn(unknown_rocap, res)
3503 self.failIfIn("Raw data as", res)
3504 self.failIfIn("Directory writecap", res)
3505 self.failIfIn("Checker Operations", res)
3506 self.failIfIn("Mutable File Operations", res)
3507 self.failIfIn("Directory Operations", res)
3509 # FIXME: these should have expect_rw_uri=not immutable; I don't know
3510 # why they fail. Possibly related to ticket #922.
3512 d.addCallback(lambda ign: self.GET(expected_info_url))
3513 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
3514 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
3515 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
3517 def _check_json(res, expect_rw_uri):
3518 data = simplejson.loads(res)
3519 self.failUnlessEqual(data[0], "unknown")
3521 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
3523 self.failIfIn("rw_uri", data[1])
3526 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
3527 self.failUnlessReallyEqual(data[1]["mutable"], False)
3529 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
3530 self.failUnlessReallyEqual(data[1]["mutable"], True)
3532 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
3533 self.failIf("mutable" in data[1], data[1])
3535 # TODO: check metadata contents
3536 self.failUnless("metadata" in data[1])
3538 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
3539 d.addCallback(_check_json, expect_rw_uri=not immutable)
3541 # and make sure that a read-only version of the directory can be
3542 # rendered too. This version will not have unknown_rwcap, whether
3543 # or not future_node was immutable.
3544 d.addCallback(lambda ign: self.GET(self.rourl))
3546 d.addCallback(_check_directory_html, "-IMM")
3548 d.addCallback(_check_directory_html, "-RO")
3550 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
3551 d.addCallback(_check_directory_json, expect_rw_uri=False)
3553 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
3554 d.addCallback(_check_json, expect_rw_uri=False)
3556 # TODO: check that getting t=info from the Info link in the ro directory
3557 # works, and does not include the writecap URI.
# Re-run test_unknown with an immutable dirnode holding an imm. cap.
3560 def test_immutable_unknown(self):
3561 return self.test_unknown(immutable=True)
def test_mutant_dirnodes_are_omitted(self):
    """Children of an immutable directory whose caps have the wrong
    mutability ("mutants") must be silently dropped from listings.

    NOTE(review): this extract is missing several original lines (grid
    setup, the `nm` nodemaker binding, the `_created` callback header,
    some closing brackets, and the trailing `return d`); comments below
    describe only the visible code.
    """
    self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
    c = self.g.clients[0]

    # One healthy LIT child, plus a mutable write/read cap pair used to
    # construct the deliberately-wrong children below.
    lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
    mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
    mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"

    # This method tests mainly dirnode, but we'd have to duplicate code in order to
    # test the dirnode and web layers separately.

    # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
    # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
    # When the directory is read, the mutants should be silently disposed of, leaving
    # their lonely sibling.
    # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
    # because immutable directories don't have a writecap and therefore that field
    # isn't (and can't be) decrypted.
    # TODO: The field still exists in the netstring. Technically we should check what
    # happens if something is put there (_unpack_contents should raise ValueError),
    # but that can wait.

    lonely_child = nm.create_from_cap(lonely_uri)
    mutant_ro_child = nm.create_from_cap(mut_read_uri)
    mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)

    # Monkey-patch the mutants so create_immutable_dirnode below will
    # accept them.  (helper body missing from extract; presumably it
    # returns True -- TODO confirm against the full file)
    def _by_hook_or_by_crook():
    for n in [mutant_ro_child, mutant_write_in_ro_child]:
        n.is_allowed_in_immutable_directory = _by_hook_or_by_crook

    # Make the write-in-ro mutant expose its *writecap* through the
    # readonly-URI accessor, i.e. a writecap lands in the ro_uri slot.
    mutant_write_in_ro_child.get_write_uri = lambda: None
    mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri

    # (closing brace of this dict is missing from the extract)
    kids = {u"lonely": (lonely_child, {}),
            u"ro": (mutant_ro_child, {}),
            u"write-in-ro": (mutant_write_in_ro_child, {}),
    d = c.create_immutable_dirnode(kids)
    # (callback header missing from extract) the created node should be
    # an immutable, read-only DirectoryNode with no decryptable writecap
    self.failUnless(isinstance(dn, dirnode.DirectoryNode))
    self.failIf(dn.is_mutable())
    self.failUnless(dn.is_readonly())
    # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
    self.failIf(hasattr(dn._node, 'get_writekey'))
    self.failUnless("RO-IMM" in rep)
    self.failUnlessIn("CHK", cap.to_string())
    self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
    return download_to_data(dn._node)
    d.addCallback(_created)

    def _check_data(data):
        # Decode the netstring representation of the directory to check that all children
        # are present. This is a bit of an abstraction violation, but there's not really
        # any other way to do it given that the real DirectoryNode._unpack_contents would
        # strip the mutant children out (which is what we're trying to test, later).
        while position < len(data):
            entries, position = split_netstring(data, 1, position)
            (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
            name = name_utf8.decode("utf-8")
            # immutable dirs leave the encrypted-rwcap field empty
            self.failUnless(rwcapdata == "")
            self.failUnless(name in kids)
            (expected_child, ign) = kids[name]
            self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
        # all three children -- including both mutants -- survive at the
        # raw netstring level; only the real unpacker strips them
        self.failUnlessReallyEqual(numkids, 3)
        return self.rootnode.list()
    d.addCallback(_check_data)

    # Now when we use the real directory listing code, the mutants should be absent.
    def _check_kids(children):
        self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
        lonely_node, lonely_metadata = children[u"lonely"]
        self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
        self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
    d.addCallback(_check_kids)

    d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
    d.addCallback(lambda n: n.list())
    d.addCallback(_check_kids) # again with dirnode recreated from cap

    # Make sure the lonely child can be listed in HTML...
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_html(res):
        # no mutant SSK caps may leak into the rendered page
        self.failIfIn("URI:SSK", res)
        # (closing bracket of this regex join is missing from the extract)
        get_lonely = "".join([r'<td>FILE</td>',
                              r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
                              r'\s+<td>%d</td>' % len("one"),
        self.failUnless(re.search(get_lonely, res), res)

        # find the More Info link for name, should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
    d.addCallback(_check_html)

    # ...and in the JSON listing: only "lonely", read-only, no rw_uri.
    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        listed_children = data[1]["children"]
        self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
        ll_type, ll_data = listed_children[u"lonely"]
        self.failUnlessEqual(ll_type, "filenode")
        self.failIf("rw_uri" in ll_data)
        self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
    d.addCallback(_check_json)
def test_deep_check(self):
    """Stream t=stream-deep-check and t=stream-manifest over a small
    tree, first healthy, then with an unrecoverable subdirectory.

    NOTE(review): this extract is missing several original lines (grid
    setup, the `_done` callback header and its try/except, `u0 =
    units[0]`-style bindings, several call continuations, and the
    trailing `return d`); comments below describe only the visible code.
    """
    self.basedir = "web/Grid/deep_check"
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    # (call continuations/closing parens missing from extract)
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")

    # this tests that deep-check and stream-manifest will ignore
    # UnknownNode instances. Hopefully this will also cover deep-stats.
    future_node = UnknownNode(unknown_rwcap, unknown_rocap)
    d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))

    # make "sick" damaged-but-recoverable by removing two of its shares
    def _clobber_shares(ignored):
        self.delete_shares_numbered(self.uris["sick"], [0,1])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    # (the `_done` callback header and try/except wrapper are missing
    # from the extract; the prints belong to its undecodable-line branch)
    units = [simplejson.loads(line)
             for line in res.splitlines()
    print "response is:", res
    print "undecodeable line was '%s'" % line
    # root + good + small + sick + future, plus the trailing stats unit
    self.failUnlessReallyEqual(len(units), 5+1)
    # should be parent-first
    self.failUnlessEqual(u0["path"], [])
    self.failUnlessEqual(u0["type"], "directory")
    self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
    u0cr = u0["check-results"]
    self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
    ugood = [u for u in units
             if u["type"] == "file" and u["path"] == [u"good"]][0]
    self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
    ugoodcr = ugood["check-results"]
    self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
    self.failUnlessEqual(stats["type"], "stats")
    self.failUnlessReallyEqual(s["count-immutable-files"], 2)
    self.failUnlessReallyEqual(s["count-literal-files"], 1)
    self.failUnlessReallyEqual(s["count-directories"], 1)
    self.failUnlessReallyEqual(s["count-unknown"], 1)
    d.addCallback(_done)

    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_manifest(res):
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessReallyEqual(len(units), 5+1)
        self.failUnlessEqual(units[-1]["type"], "stats")
        # parent-first ordering again: the root comes out first
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
        self.failUnlessEqual(first["type"], "directory")
        stats = units[-1]["stats"]
        self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
        self.failUnlessReallyEqual(stats["count-literal-files"], 1)
        self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
        self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
        self.failUnlessReallyEqual(stats["count-unknown"], 1)
    d.addCallback(_check_manifest)

    # now add root/subdir and root/subdir/grandchild, then make subdir
    # unrecoverable, then see what happens

    d.addCallback(lambda ign:
                  self.rootnode.create_subdirectory(u"subdir"))
    d.addCallback(_stash_uri, "subdir")
    d.addCallback(lambda subdir_node:
                  subdir_node.add_file(u"grandchild",
                                       upload.Data(DATA+"2",
    d.addCallback(_stash_uri, "grandchild")

    d.addCallback(lambda ign:
                  self.delete_shares_numbered(self.uris["subdir"],

    # root/subdir [unrecoverable]
    # root/subdir/grandchild

    # how should a streaming-JSON API indicate fatal error?
    # answer: emit ERROR: instead of a JSON string

    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_broken_manifest(res):
        lines = res.splitlines()
        # (the `error_lines = [i ...` opener and the `if not error_lines:`
        # guard are missing from the extract)
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        # everything before the ERROR: marker must still be valid JSON
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessReallyEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
    d.addCallback(_check_broken_manifest)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    def _check_broken_deepcheck(res):
        lines = res.splitlines()
        # (same missing opener/guard as in _check_broken_manifest)
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessReallyEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
        # the unrecoverable subdir still gets a check-results unit
        r = last_unit["check-results"]["results"]
        self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
        self.failUnlessReallyEqual(r["count-shares-good"], 1)
        self.failUnlessReallyEqual(r["recoverable"], False)
    d.addCallback(_check_broken_deepcheck)

    d.addErrback(self.explain_web_error)
def test_deep_check_and_repair(self):
    """Stream t=stream-deep-check&repair=true and verify that only the
    damaged "sick" file gets (successfully) repaired.

    NOTE(review): this extract is missing several original lines (grid
    setup, the `_done` callback header, `u0 = units[0]`/`stats` bindings,
    call continuations, and the trailing `return d`); comments below
    describe only the visible code.
    """
    self.basedir = "web/Grid/deep_check_and_repair"
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    # (call continuations/closing parens missing from extract)
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")
    #d.addCallback(lambda ign:
    #              self.rootnode.add_file(u"dead",
    #                                     upload.Data(DATA+"2",
    #d.addCallback(_stash_uri, "dead")

    #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
    #d.addCallback(_stash_uri, "corrupt")

    # leave "good" intact, remove one share of "sick" (making it
    # damaged-but-recoverable); the dead/corrupt cases stay disabled
    def _clobber_shares(ignored):
        good_shares = self.find_uri_shares(self.uris["good"])
        self.failUnlessReallyEqual(len(good_shares), 10)
        sick_shares = self.find_uri_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        #dead_shares = self.find_uri_shares(self.uris["dead"])
        #for i in range(1, 10):
        #    os.unlink(dead_shares[i][2])

        #c_shares = self.find_uri_shares(self.uris["corrupt"])
        #cso = CorruptShareOptions()
        #cso.stdout = StringIO()
        #cso.parseOptions([c_shares[0][2]])
    d.addCallback(_clobber_shares)

    # root/good CHK, 10 shares
    # root/sick CHK, 9 shares
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
    # (the `_done` callback header is missing from the extract)
    units = [simplejson.loads(line)
             for line in res.splitlines()
    # root + good + small + sick, plus the trailing stats unit
    self.failUnlessReallyEqual(len(units), 4+1)
    # should be parent-first
    self.failUnlessEqual(u0["path"], [])
    self.failUnlessEqual(u0["type"], "directory")
    self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
    # healthy root: no repair attempted
    u0crr = u0["check-and-repair-results"]
    self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
    self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
    ugood = [u for u in units
             if u["type"] == "file" and u["path"] == [u"good"]][0]
    self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
    ugoodcrr = ugood["check-and-repair-results"]
    self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
    self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
    # "sick" had 9 shares: repair is attempted, succeeds, restores 10
    usick = [u for u in units
             if u["type"] == "file" and u["path"] == [u"sick"]][0]
    self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
    usickcrr = usick["check-and-repair-results"]
    self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
    self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
    self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
    self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
    self.failUnlessEqual(stats["type"], "stats")
    self.failUnlessReallyEqual(s["count-immutable-files"], 2)
    self.failUnlessReallyEqual(s["count-literal-files"], 1)
    self.failUnlessReallyEqual(s["count-directories"], 1)
    d.addCallback(_done)

    d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    # Collect a (share-filename, lease-count) pair for every share of
    # self.uris[which]; fed into _assert_leasecount by the lease tests.
    # `ignored` exists so this can sit directly in a Deferred chain.
    # NOTE(review): the extract is missing the `lease_counts = []`
    # initializer and the trailing `return lease_counts` -- TODO confirm
    # against the full file.
    u = self.uris[which]
    shares = self.find_uri_shares(u)
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
def _assert_leasecount(self, lease_counts, expected):
    """Fail the test unless every share in lease_counts carries exactly
    `expected` leases.

    lease_counts is a list of (share-filename, lease-count) pairs as
    produced by _count_leases; the failure message names the first
    offending share file.
    """
    mismatches = [(fn, n) for (fn, n) in lease_counts if n != expected]
    if mismatches:
        bad_fn, bad_count = mismatches[0]
        self.fail("expected %d leases, have %d, on %s" %
                  (expected, bad_count, bad_fn))
def test_add_lease(self):
    """t=check&add-lease=true adds a lease only when the checker uses a
    different client's lease secrets.

    NOTE(review): this extract is missing a few original lines (the DATA
    and self.uris/self.fileurls initializers, the `clientnum=1`
    continuation of the final mutable CHECK, and the trailing
    `return d`); comments below describe only the visible code.
    """
    self.basedir = "web/Grid/add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "two")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
    d.addCallback(_stash_mutable_uri, "mutable")

    def _compute_fileurls(ignored):
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    # baseline: one lease on every file
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "one", "t=check") # no add-lease
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)

    # a plain check must not change any lease counts
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses the original client, which uses the same
    # lease-secrets, so it will just renew the original lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses an alternate client, which adds a second lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # same-client add-lease on the mutable file: still one lease
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # alternate-client add-lease on the mutable file: second lease
    # (the `clientnum=1)` continuation line is missing from the extract)
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
def test_deep_add_lease(self):
    """t=stream-deep-check&add-lease=true adds leases tree-wide, but only
    when issued by a client with different lease secrets.

    NOTE(review): this extract is missing a few original lines (DATA and
    dict initializers, the `_done` callback header, the `clientnum=1`
    continuation of the final CHECK, and the trailing `return d`);
    comments below describe only the visible code.
    """
    self.basedir = "web/Grid/deep_add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.uris["root"] = n.get_uri()
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"one", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "one")
    # (call continuation/closing parens missing from extract)
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")

    d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
    d.addCallback(_stash_uri, "mutable")

    d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
    # (the `_done` callback header is missing from the extract)
    units = [simplejson.loads(line)
             for line in res.splitlines()
    # root, one, small, mutable, stats
    self.failUnlessReallyEqual(len(units), 4+1)
    d.addCallback(_done)

    # baseline: one lease each
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # same-client add-lease only renews: counts unchanged
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # alternate-client add-lease: every node gains a second lease
    # (the `clientnum=1)` continuation line is missing from the extract)
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
def test_exceptions(self):
    """Exercise the webapi's error reporting: NoSharesError,
    NotEnoughSharesError, missing children, UnrecoverableFileError for
    directories, and content-negotiated 500 stack traces.

    NOTE(review): this extract is missing several original lines (a few
    callback headers such as `_stash_root`/`_stash_bad`, the msg1/msg2
    bindings in _check_one_share, some call continuations, and the
    trailing `return d`); comments below describe only the visible code.
    """
    self.basedir = "web/Grid/exceptions"
    self.set_up_grid(num_clients=1, num_servers=2)
    c0 = self.g.clients[0]
    # happiness=2 so uploads to the 2-server grid succeed
    c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
    d = c0.create_dirnode()
    # (callback header missing from extract) stash the root URL plus a
    # URL for a child that does not exist
    self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
    self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
    d.addCallback(_stash_root)
    d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
    # (callback header missing from extract) keep exactly one share of
    # this file, then build a flipped-key URI that matches zero shares
    self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
    self.delete_shares_numbered(ur.uri, range(1,10))
    u = uri.from_string(ur.uri)
    u.key = testutil.flip_bit(u.key, 0)
    baduri = u.to_string()
    self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
    d.addCallback(_stash_bad)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_1share(n):
        url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-1share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(1,10))
    d.addCallback(_mangle_dirnode_1share)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_0share(n):
        url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-0share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(0,10))
    d.addCallback(_mangle_dirnode_0share)

    # NotEnoughSharesError should be reported sensibly, with a
    # text/plain explanation of the problem, and perhaps some
    # information on which shares *could* be found.
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET unrecoverable",
                                       410, "Gone", "NoSharesError",
                                       self.GET, self.fileurls["0shares"]))
    def _check_zero_shares(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NoSharesError: no shares could be found. "
               "Zero shares usually indicates a corrupt URI, or that "
               "no servers were connected, but it might also indicate "
               "severe corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is: "
               "no shares (need 3). Last failure: None")
        self.failUnlessReallyEqual(exp, body)
    d.addCallback(_check_zero_shares)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET 1share",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["1share"]))
    def _check_one_share(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        msg = ("NotEnoughSharesError: This indicates that some "
               "servers were unavailable, or that shares have been "
               "lost to server departure, hard drive failure, or disk "
               "corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is:"
               " ran out of shares: %d complete, %d pending, 0 overdue,"
               " 0 unused, need 3. Last failure: None")
        # (the msg1/msg2 bindings that fill in the %d slots are missing
        # from the extract)
        self.failUnless(body == msg1 or body == msg2, body)
    d.addCallback(_check_one_share)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    def _missing_child(body):
        self.failUnless("No such child: imaginary" in body, body)
    d.addCallback(_missing_child)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
    def _check_0shares_dir_html(body):
        self.failUnless("<html>" in body, body)
        # we should see the regular page, but without the child table or
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_0shares_dir_html)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
    def _check_1shares_dir_html(body):
        # at some point, we'll split UnrecoverableFileError into 0-shares
        # and some-shares like we did for immutable files (since there
        # are different sorts of advice to offer in each case). For now,
        # they present the same way.
        self.failUnless("<html>" in body, body)
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_1shares_dir_html)

    # the JSON forms of the unrecoverable directories get a plain 410
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-0share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-0share-json"]))
    def _check_unrecoverable_file(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessReallyEqual(exp, body)
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-1share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-1share-json"]))
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))

    # attach a webapi child that throws a random error, to test how it
    w = c0.getServiceNamed("webish")
    w.root.putChild("ERRORBOOM", ErrorBoom())

    # "Accept: */*" : should get a text/html stack trace
    # "Accept: text/plain" : should get a text/plain stack trace
    # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
    # no Accept header: should get a text/html stack trace

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_html",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["*/*"]}))
    def _internal_error_html1(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html1)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["text/plain"]}))
    def _internal_error_text2(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text2)

    CLI_accepts = "text/plain, application/octet-stream"
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": [CLI_accepts]}))
    def _internal_error_text3(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text3)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM"))
    def _internal_error_html4(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html4)

    def _flush_errors(res):
        # Trial: please ignore the CompletelyUnhandledError in the logs
        self.flushLoggedErrors(CompletelyUnhandledError)
    d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Exception type that no webapi handler knows about.

    Raised by ErrorBoom to exercise the webapi's generic internal-error
    (HTTP 500) reporting; test_exceptions flushes it from Trial's logs
    afterwards.  (The extract dropped the original class body line, so
    this docstring also serves as the required class body.)
    """
class ErrorBoom(rend.Page):
    # Minimal nevow resource whose rendering always blows up; attached
    # as the "ERRORBOOM" webapi child in test_exceptions to exercise the
    # server's internal-error (HTTP 500) content negotiation.
    def beforeRender(self, ctx):
        # raised before any rendering happens, so every GET fails
        raise CompletelyUnhandledError("whoops")