2 import os.path, re, urllib, time
4 from StringIO import StringIO
5 from twisted.application import service
6 from twisted.trial import unittest
7 from twisted.internet import defer, reactor
8 from twisted.internet.task import Clock
9 from twisted.web import client, error, http
10 from twisted.python import failure, log
11 from nevow import rend
12 from allmydata import interfaces, uri, webish, dirnode
13 from allmydata.storage.shares import get_share_file
14 from allmydata.storage_client import StorageFarmBroker
15 from allmydata.immutable import upload
16 from allmydata.immutable.downloader.status import DownloadStatus
17 from allmydata.dirnode import DirectoryNode
18 from allmydata.nodemaker import NodeMaker
19 from allmydata.unknown import UnknownNode
20 from allmydata.web import status, common
21 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
22 from allmydata.util import fileutil, base32
23 from allmydata.util.consumer import download_to_data
24 from allmydata.util.netstring import split_netstring
25 from allmydata.util.encodingutil import to_str
26 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
27 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
28 from allmydata.interfaces import IMutableFileNode
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
36 # create a fake uploader/downloader, and a couple of fake dirnodes, then
37 # create a webserver that works against them
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.

# Syntactically cap-like strings whose prefixes are unknown to this version
# of the code; used to exercise UnknownNode handling of "from the future"
# caps.  Each includes a non-ASCII smiley (U+263A) to exercise UTF-8 paths.
unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
45 class FakeStatsProvider:
47 stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker double: fabricates in-memory fake filenodes instead of
    touching real storage servers."""
    def _create_lit(self, cap):
        # LIT caps are served by the same fake CHK node class.
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for interface compatibility and ignored.
        return FakeMutableFileNode(None, None, None, None).create(contents)
61 class FakeUploader(service.Service):
63 def upload(self, uploadable, history=None):
64 d = uploadable.get_size()
65 d.addCallback(lambda size: uploadable.read(size))
68 n = create_chk_filenode(data)
69 results = upload.UploadResults()
70 results.uri = n.get_uri()
72 d.addCallback(_got_data)
74 def get_helper_info(self):
78 ds = DownloadStatus("storage_index", 1234)
81 ds.add_segment_request(0, now)
82 # segnum, when, start,len, decodetime
83 ds.add_segment_delivery(0, now+1, 0, 100, 0.5)
84 ds.add_segment_request(1, now+2)
85 ds.add_segment_error(1, now+3)
86 # two outstanding requests
87 ds.add_segment_request(2, now+4)
88 ds.add_segment_request(3, now+5)
90 e = ds.add_dyhb_sent("serverid_a", now)
91 e.finished([1,2], now+1)
92 e = ds.add_dyhb_sent("serverid_b", now+2) # left unfinished
94 e = ds.add_read_event(0, 120, now)
95 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
97 e = ds.add_read_event(120, 30, now+2) # left unfinished
99 e = ds.add_request_sent("serverid_a", 1, 100, 20, now)
100 e.finished(20, now+1)
101 e = ds.add_request_sent("serverid_a", 1, 120, 30, now+1) # left unfinished
103 # make sure that add_read_event() can come first too
104 ds1 = DownloadStatus("storage_index", 1234)
105 e = ds1.add_read_event(0, 120, now)
106 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
    # Canned status objects, one of each kind, so the status-display web
    # pages always have something to render.  Class-level, so they are
    # shared by every FakeHistory instance.
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [build_one_ds()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    def list_all_upload_statuses(self):
        """Return the canned list of upload-status objects."""
        return self._all_upload_status
    def list_all_download_statuses(self):
        """Return the canned list of download-status objects."""
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        """Return the canned list of servermap-update status objects."""
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        """Return the canned list of mutable-publish status objects."""
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        """Return the canned list of mutable-retrieve status objects."""
        return self._all_retrieve_statuses
128 def list_all_helper_statuses(self):
131 class FakeClient(Client):
133 # don't upcall to Client.__init__, since we only want to initialize a
135 service.MultiService.__init__(self)
136 self.nodeid = "fake_nodeid"
137 self.nickname = "fake_nickname"
138 self.introducer_furl = "None"
139 self.stats_provider = FakeStatsProvider()
140 self._secret_holder = SecretHolder("lease secret", "convergence secret")
142 self.convergence = "some random string"
143 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
144 self.introducer_client = None
145 self.history = FakeHistory()
146 self.uploader = FakeUploader()
147 self.uploader.setServiceParent(self)
148 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
    def startService(self):
        # Plain MultiService startup; FakeClient adds no start-time work.
        return service.MultiService.startService(self)
    def stopService(self):
        # Plain MultiService shutdown; FakeClient adds no stop-time work.
        return service.MultiService.stopService(self)
157 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
159 class WebMixin(object):
161 self.s = FakeClient()
162 self.s.startService()
163 self.staticdir = self.mktemp()
165 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
167 self.ws.setServiceParent(self.s)
168 self.webish_port = port = self.ws.listener._port.getHost().port
169 self.webish_url = "http://localhost:%d" % port
171 l = [ self.s.create_dirnode() for x in range(6) ]
172 d = defer.DeferredList(l)
174 self.public_root = res[0][1]
175 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
176 self.public_url = "/uri/" + self.public_root.get_uri()
177 self.private_root = res[1][1]
181 self._foo_uri = foo.get_uri()
182 self._foo_readonly_uri = foo.get_readonly_uri()
183 self._foo_verifycap = foo.get_verify_cap().to_string()
184 # NOTE: we ignore the deferred on all set_uri() calls, because we
185 # know the fake nodes do these synchronously
186 self.public_root.set_uri(u"foo", foo.get_uri(),
187 foo.get_readonly_uri())
189 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
190 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
191 self._bar_txt_verifycap = n.get_verify_cap().to_string()
193 foo.set_uri(u"empty", res[3][1].get_uri(),
194 res[3][1].get_readonly_uri())
195 sub_uri = res[4][1].get_uri()
196 self._sub_uri = sub_uri
197 foo.set_uri(u"sub", sub_uri, sub_uri)
198 sub = self.s.create_node_from_uri(sub_uri)
200 _ign, n, blocking_uri = self.makefile(1)
201 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
203 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
204 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
205 # still think of it as an umlaut
206 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
208 _ign, n, baz_file = self.makefile(2)
209 self._baz_file_uri = baz_file
210 sub.set_uri(u"baz.txt", baz_file, baz_file)
212 _ign, n, self._bad_file_uri = self.makefile(3)
213 # this uri should not be downloadable
214 del FakeCHKFileNode.all_contents[self._bad_file_uri]
217 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
218 rodir.get_readonly_uri())
219 rodir.set_uri(u"nor", baz_file, baz_file)
224 # public/foo/blockingfile
227 # public/foo/sub/baz.txt
229 # public/reedownlee/nor
230 self.NEWFILE_CONTENTS = "newfile contents\n"
232 return foo.get_metadata_for(u"bar.txt")
234 def _got_metadata(metadata):
235 self._bar_txt_metadata = metadata
236 d.addCallback(_got_metadata)
239 def makefile(self, number):
240 contents = "contents of file %s\n" % number
241 n = create_chk_filenode(contents)
242 return contents, n, n.get_uri()
245 return self.s.stopService()
    def failUnlessIsBarDotTxt(self, res):
        # res should be exactly the contents of bar.txt; pass res as the
        # message argument so a mismatch shows the offending body.
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
250 def failUnlessIsBarJSON(self, res):
251 data = simplejson.loads(res)
252 self.failUnless(isinstance(data, list))
253 self.failUnlessEqual(data[0], "filenode")
254 self.failUnless(isinstance(data[1], dict))
255 self.failIf(data[1]["mutable"])
256 self.failIf("rw_uri" in data[1]) # immutable
257 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
258 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
259 self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))
261 def failUnlessIsFooJSON(self, res):
262 data = simplejson.loads(res)
263 self.failUnless(isinstance(data, list))
264 self.failUnlessEqual(data[0], "dirnode", res)
265 self.failUnless(isinstance(data[1], dict))
266 self.failUnless(data[1]["mutable"])
267 self.failUnless("rw_uri" in data[1]) # mutable
268 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
269 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
270 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
272 kidnames = sorted([unicode(n) for n in data[1]["children"]])
273 self.failUnlessEqual(kidnames,
274 [u"bar.txt", u"blockingfile", u"empty",
275 u"n\u00fc.txt", u"sub"])
276 kids = dict( [(unicode(name),value)
278 in data[1]["children"].iteritems()] )
279 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
280 self.failUnlessIn("metadata", kids[u"sub"][1])
281 self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
282 tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
283 self.failUnlessIn("linkcrtime", tahoe_md)
284 self.failUnlessIn("linkmotime", tahoe_md)
285 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
286 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
287 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
288 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
289 self._bar_txt_verifycap)
290 self.failUnlessIn("metadata", kids[u"bar.txt"][1])
291 self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
292 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
293 self._bar_txt_metadata["tahoe"]["linkcrtime"])
294 self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
297 def GET(self, urlpath, followRedirect=False, return_response=False,
299 # if return_response=True, this fires with (data, statuscode,
300 # respheaders) instead of just data.
301 assert not isinstance(urlpath, unicode)
302 url = self.webish_url + urlpath
303 factory = HTTPClientGETFactory(url, method="GET",
304 followRedirect=followRedirect, **kwargs)
305 reactor.connectTCP("localhost", self.webish_port, factory)
308 return (data, factory.status, factory.response_headers)
310 d.addCallback(_got_data)
311 return factory.deferred
313 def HEAD(self, urlpath, return_response=False, **kwargs):
314 # this requires some surgery, because twisted.web.client doesn't want
315 # to give us back the response headers.
316 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
317 reactor.connectTCP("localhost", self.webish_port, factory)
320 return (data, factory.status, factory.response_headers)
322 d.addCallback(_got_data)
323 return factory.deferred
325 def PUT(self, urlpath, data, **kwargs):
326 url = self.webish_url + urlpath
327 return client.getPage(url, method="PUT", postdata=data, **kwargs)
329 def DELETE(self, urlpath):
330 url = self.webish_url + urlpath
331 return client.getPage(url, method="DELETE")
333 def POST(self, urlpath, followRedirect=False, **fields):
334 sepbase = "boogabooga"
338 form.append('Content-Disposition: form-data; name="_charset"')
342 for name, value in fields.iteritems():
343 if isinstance(value, tuple):
344 filename, value = value
345 form.append('Content-Disposition: form-data; name="%s"; '
346 'filename="%s"' % (name, filename.encode("utf-8")))
348 form.append('Content-Disposition: form-data; name="%s"' % name)
350 if isinstance(value, unicode):
351 value = value.encode("utf-8")
354 assert isinstance(value, str)
361 body = "\r\n".join(form) + "\r\n"
362 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
363 return self.POST2(urlpath, body, headers, followRedirect)
365 def POST2(self, urlpath, body="", headers={}, followRedirect=False):
366 url = self.webish_url + urlpath
367 return client.getPage(url, method="POST", postdata=body,
368 headers=headers, followRedirect=followRedirect)
370 def shouldFail(self, res, expected_failure, which,
371 substring=None, response_substring=None):
372 if isinstance(res, failure.Failure):
373 res.trap(expected_failure)
375 self.failUnless(substring in str(res),
376 "substring '%s' not in '%s'"
377 % (substring, str(res)))
378 if response_substring:
379 self.failUnless(response_substring in res.value.response,
380 "response substring '%s' not in '%s'"
381 % (response_substring, res.value.response))
383 self.fail("%s was supposed to raise %s, not get '%s'" %
384 (which, expected_failure, res))
386 def shouldFail2(self, expected_failure, which, substring,
388 callable, *args, **kwargs):
389 assert substring is None or isinstance(substring, str)
390 assert response_substring is None or isinstance(response_substring, str)
391 d = defer.maybeDeferred(callable, *args, **kwargs)
393 if isinstance(res, failure.Failure):
394 res.trap(expected_failure)
396 self.failUnless(substring in str(res),
397 "%s: substring '%s' not in '%s'"
398 % (which, substring, str(res)))
399 if response_substring:
400 self.failUnless(response_substring in res.value.response,
401 "%s: response substring '%s' not in '%s'"
403 response_substring, res.value.response))
405 self.fail("%s was supposed to raise %s, not get '%s'" %
406 (which, expected_failure, res))
410 def should404(self, res, which):
411 if isinstance(res, failure.Failure):
412 res.trap(error.Error)
413 self.failUnlessReallyEqual(res.value.status, "404")
415 self.fail("%s was supposed to Error(404), not get '%s'" %
418 def should302(self, res, which):
419 if isinstance(res, failure.Failure):
420 res.trap(error.Error)
421 self.failUnlessReallyEqual(res.value.status, "302")
423 self.fail("%s was supposed to Error(302), not get '%s'" %
427 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
428 def test_create(self):
431 def test_welcome(self):
434 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
436 self.s.basedir = 'web/test_welcome'
437 fileutil.make_dirs("web/test_welcome")
438 fileutil.make_dirs("web/test_welcome/private")
440 d.addCallback(_check)
443 def test_provisioning(self):
444 d = self.GET("/provisioning/")
446 self.failUnless('Provisioning Tool' in res)
447 fields = {'filled': True,
448 "num_users": int(50e3),
449 "files_per_user": 1000,
450 "space_per_user": int(1e9),
451 "sharing_ratio": 1.0,
452 "encoding_parameters": "3-of-10-5",
454 "ownership_mode": "A",
455 "download_rate": 100,
460 return self.POST("/provisioning/", **fields)
462 d.addCallback(_check)
464 self.failUnless('Provisioning Tool' in res)
465 self.failUnless("Share space consumed: 167.01TB" in res)
467 fields = {'filled': True,
468 "num_users": int(50e6),
469 "files_per_user": 1000,
470 "space_per_user": int(5e9),
471 "sharing_ratio": 1.0,
472 "encoding_parameters": "25-of-100-50",
473 "num_servers": 30000,
474 "ownership_mode": "E",
475 "drive_failure_model": "U",
477 "download_rate": 1000,
482 return self.POST("/provisioning/", **fields)
483 d.addCallback(_check2)
485 self.failUnless("Share space consumed: huge!" in res)
486 fields = {'filled': True}
487 return self.POST("/provisioning/", **fields)
488 d.addCallback(_check3)
490 self.failUnless("Share space consumed:" in res)
491 d.addCallback(_check4)
494 def test_reliability_tool(self):
496 from allmydata import reliability
497 _hush_pyflakes = reliability
500 raise unittest.SkipTest("reliability tool requires NumPy")
502 d = self.GET("/reliability/")
504 self.failUnless('Reliability Tool' in res)
505 fields = {'drive_lifetime': "8Y",
510 "check_period": "1M",
511 "report_period": "3M",
514 return self.POST("/reliability/", **fields)
516 d.addCallback(_check)
518 self.failUnless('Reliability Tool' in res)
519 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
520 self.failUnless(re.search(r, res), res)
521 d.addCallback(_check2)
524 def test_status(self):
525 h = self.s.get_history()
526 dl_num = h.list_all_download_statuses()[0].get_counter()
527 ul_num = h.list_all_upload_statuses()[0].get_counter()
528 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
529 pub_num = h.list_all_publish_statuses()[0].get_counter()
530 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
531 d = self.GET("/status", followRedirect=True)
533 self.failUnless('Upload and Download Status' in res, res)
534 self.failUnless('"down-%d"' % dl_num in res, res)
535 self.failUnless('"up-%d"' % ul_num in res, res)
536 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
537 self.failUnless('"publish-%d"' % pub_num in res, res)
538 self.failUnless('"retrieve-%d"' % ret_num in res, res)
539 d.addCallback(_check)
540 d.addCallback(lambda res: self.GET("/status/?t=json"))
541 def _check_json(res):
542 data = simplejson.loads(res)
543 self.failUnless(isinstance(data, dict))
544 #active = data["active"]
545 # TODO: test more. We need a way to fake an active operation
547 d.addCallback(_check_json)
549 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
551 self.failUnless("File Download Status" in res, res)
552 d.addCallback(_check_dl)
553 d.addCallback(lambda res: self.GET("/status/down-%d?t=json" % dl_num))
554 def _check_dl_json(res):
555 data = simplejson.loads(res)
556 self.failUnless(isinstance(data, dict))
557 d.addCallback(_check_dl_json)
558 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
560 self.failUnless("File Upload Status" in res, res)
561 d.addCallback(_check_ul)
562 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
563 def _check_mapupdate(res):
564 self.failUnless("Mutable File Servermap Update Status" in res, res)
565 d.addCallback(_check_mapupdate)
566 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
567 def _check_publish(res):
568 self.failUnless("Mutable File Publish Status" in res, res)
569 d.addCallback(_check_publish)
570 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
571 def _check_retrieve(res):
572 self.failUnless("Mutable File Retrieve Status" in res, res)
573 d.addCallback(_check_retrieve)
577 def test_status_numbers(self):
578 drrm = status.DownloadResultsRendererMixin()
579 self.failUnlessReallyEqual(drrm.render_time(None, None), "")
580 self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
581 self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
582 self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
583 self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
584 self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
585 self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
586 self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
587 self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
589 urrm = status.UploadResultsRendererMixin()
590 self.failUnlessReallyEqual(urrm.render_time(None, None), "")
591 self.failUnlessReallyEqual(urrm.render_time(None, 2.5), "2.50s")
592 self.failUnlessReallyEqual(urrm.render_time(None, 0.25), "250ms")
593 self.failUnlessReallyEqual(urrm.render_time(None, 0.0021), "2.1ms")
594 self.failUnlessReallyEqual(urrm.render_time(None, 0.000123), "123us")
595 self.failUnlessReallyEqual(urrm.render_rate(None, None), "")
596 self.failUnlessReallyEqual(urrm.render_rate(None, 2500000), "2.50MBps")
597 self.failUnlessReallyEqual(urrm.render_rate(None, 30100), "30.1kBps")
598 self.failUnlessReallyEqual(urrm.render_rate(None, 123), "123Bps")
600 def test_GET_FILEURL(self):
601 d = self.GET(self.public_url + "/foo/bar.txt")
602 d.addCallback(self.failUnlessIsBarDotTxt)
605 def test_GET_FILEURL_range(self):
606 headers = {"range": "bytes=1-10"}
607 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
608 return_response=True)
609 def _got((res, status, headers)):
610 self.failUnlessReallyEqual(int(status), 206)
611 self.failUnless(headers.has_key("content-range"))
612 self.failUnlessReallyEqual(headers["content-range"][0],
613 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
614 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
618 def test_GET_FILEURL_partial_range(self):
619 headers = {"range": "bytes=5-"}
620 length = len(self.BAR_CONTENTS)
621 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
622 return_response=True)
623 def _got((res, status, headers)):
624 self.failUnlessReallyEqual(int(status), 206)
625 self.failUnless(headers.has_key("content-range"))
626 self.failUnlessReallyEqual(headers["content-range"][0],
627 "bytes 5-%d/%d" % (length-1, length))
628 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
632 def test_GET_FILEURL_partial_end_range(self):
633 headers = {"range": "bytes=-5"}
634 length = len(self.BAR_CONTENTS)
635 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
636 return_response=True)
637 def _got((res, status, headers)):
638 self.failUnlessReallyEqual(int(status), 206)
639 self.failUnless(headers.has_key("content-range"))
640 self.failUnlessReallyEqual(headers["content-range"][0],
641 "bytes %d-%d/%d" % (length-5, length-1, length))
642 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
646 def test_GET_FILEURL_partial_range_overrun(self):
647 headers = {"range": "bytes=100-200"}
648 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
649 "416 Requested Range not satisfiable",
650 "First beyond end of file",
651 self.GET, self.public_url + "/foo/bar.txt",
655 def test_HEAD_FILEURL_range(self):
656 headers = {"range": "bytes=1-10"}
657 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
658 return_response=True)
659 def _got((res, status, headers)):
660 self.failUnlessReallyEqual(res, "")
661 self.failUnlessReallyEqual(int(status), 206)
662 self.failUnless(headers.has_key("content-range"))
663 self.failUnlessReallyEqual(headers["content-range"][0],
664 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
668 def test_HEAD_FILEURL_partial_range(self):
669 headers = {"range": "bytes=5-"}
670 length = len(self.BAR_CONTENTS)
671 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
672 return_response=True)
673 def _got((res, status, headers)):
674 self.failUnlessReallyEqual(int(status), 206)
675 self.failUnless(headers.has_key("content-range"))
676 self.failUnlessReallyEqual(headers["content-range"][0],
677 "bytes 5-%d/%d" % (length-1, length))
681 def test_HEAD_FILEURL_partial_end_range(self):
682 headers = {"range": "bytes=-5"}
683 length = len(self.BAR_CONTENTS)
684 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
685 return_response=True)
686 def _got((res, status, headers)):
687 self.failUnlessReallyEqual(int(status), 206)
688 self.failUnless(headers.has_key("content-range"))
689 self.failUnlessReallyEqual(headers["content-range"][0],
690 "bytes %d-%d/%d" % (length-5, length-1, length))
694 def test_HEAD_FILEURL_partial_range_overrun(self):
695 headers = {"range": "bytes=100-200"}
696 d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
697 "416 Requested Range not satisfiable",
699 self.HEAD, self.public_url + "/foo/bar.txt",
703 def test_GET_FILEURL_range_bad(self):
704 headers = {"range": "BOGUS=fizbop-quarnak"}
705 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
706 return_response=True)
707 def _got((res, status, headers)):
708 self.failUnlessReallyEqual(int(status), 200)
709 self.failUnless(not headers.has_key("content-range"))
710 self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
714 def test_HEAD_FILEURL(self):
715 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
716 def _got((res, status, headers)):
717 self.failUnlessReallyEqual(res, "")
718 self.failUnlessReallyEqual(headers["content-length"][0],
719 str(len(self.BAR_CONTENTS)))
720 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
724 def test_GET_FILEURL_named(self):
725 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
726 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
727 d = self.GET(base + "/@@name=/blah.txt")
728 d.addCallback(self.failUnlessIsBarDotTxt)
729 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
730 d.addCallback(self.failUnlessIsBarDotTxt)
731 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
732 d.addCallback(self.failUnlessIsBarDotTxt)
733 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
734 d.addCallback(self.failUnlessIsBarDotTxt)
735 save_url = base + "?save=true&filename=blah.txt"
736 d.addCallback(lambda res: self.GET(save_url))
737 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
738 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
739 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
740 u_url = base + "?save=true&filename=" + u_fn_e
741 d.addCallback(lambda res: self.GET(u_url))
742 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
745 def test_PUT_FILEURL_named_bad(self):
746 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
747 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
749 "/file can only be used with GET or HEAD",
750 self.PUT, base + "/@@name=/blah.txt", "")
753 def test_GET_DIRURL_named_bad(self):
754 base = "/file/%s" % urllib.quote(self._foo_uri)
755 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
758 self.GET, base + "/@@name=/blah.txt")
761 def test_GET_slash_file_bad(self):
762 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
764 "/file must be followed by a file-cap and a name",
768 def test_GET_unhandled_URI_named(self):
769 contents, n, newuri = self.makefile(12)
770 verifier_cap = n.get_verify_cap().to_string()
771 base = "/file/%s" % urllib.quote(verifier_cap)
772 # client.create_node_from_uri() can't handle verify-caps
773 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
774 "400 Bad Request", "is not a file-cap",
778 def test_GET_unhandled_URI(self):
779 contents, n, newuri = self.makefile(12)
780 verifier_cap = n.get_verify_cap().to_string()
781 base = "/uri/%s" % urllib.quote(verifier_cap)
782 # client.create_node_from_uri() can't handle verify-caps
783 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
785 "GET unknown URI type: can only do t=info",
789 def test_GET_FILE_URI(self):
790 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
792 d.addCallback(self.failUnlessIsBarDotTxt)
795 def test_GET_FILE_URI_badchild(self):
796 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
797 errmsg = "Files have no children, certainly not named 'boguschild'"
798 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
799 "400 Bad Request", errmsg,
803 def test_PUT_FILE_URI_badchild(self):
804 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
805 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
806 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
807 "400 Bad Request", errmsg,
811 # TODO: version of this with a Unicode filename
812 def test_GET_FILEURL_save(self):
813 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
814 return_response=True)
815 def _got((res, statuscode, headers)):
816 content_disposition = headers["content-disposition"][0]
817 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
818 self.failUnlessIsBarDotTxt(res)
822 def test_GET_FILEURL_missing(self):
823 d = self.GET(self.public_url + "/foo/missing")
824 d.addBoth(self.should404, "test_GET_FILEURL_missing")
827 def test_PUT_overwrite_only_files(self):
828 # create a directory, put a file in that directory.
829 contents, n, filecap = self.makefile(8)
830 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
831 d.addCallback(lambda res:
832 self.PUT(self.public_url + "/foo/dir/file1.txt",
833 self.NEWFILE_CONTENTS))
834 # try to overwrite the file with replace=only-files
836 d.addCallback(lambda res:
837 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
839 d.addCallback(lambda res:
840 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
841 "There was already a child by that name, and you asked me "
843 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
847 def test_PUT_NEWFILEURL(self):
848 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
849 # TODO: we lose the response code, so we can't check this
850 #self.failUnlessReallyEqual(responsecode, 201)
851 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
852 d.addCallback(lambda res:
853 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
854 self.NEWFILE_CONTENTS))
857 def test_PUT_NEWFILEURL_not_mutable(self):
858 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
859 self.NEWFILE_CONTENTS)
860 # TODO: we lose the response code, so we can't check this
861 #self.failUnlessReallyEqual(responsecode, 201)
862 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
863 d.addCallback(lambda res:
864 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
865 self.NEWFILE_CONTENTS))
868 def test_PUT_NEWFILEURL_range_bad(self):
869 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
870 target = self.public_url + "/foo/new.txt"
871 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
872 "501 Not Implemented",
873 "Content-Range in PUT not yet supported",
874 # (and certainly not for immutable files)
875 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
877 d.addCallback(lambda res:
878 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
# NOTE(review): numbered listing of Tahoe-LAFS webapi tests; leading indentation
# is lost and the embedded numbering has gaps (e.g. 886, 890, 897-898 elided),
# so this chunk is not runnable as-is. Comments below describe only what the
# visible lines show.
# PUT ?mutable=true: the returned cap should parse as a writeable mutable filecap.
881 def test_PUT_NEWFILEURL_mutable(self):
882 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
883 self.NEWFILE_CONTENTS)
884 # TODO: we lose the response code, so we can't check this
885 #self.failUnlessReallyEqual(responsecode, 201)
887 u = uri.from_string_mutable_filenode(res)
888 self.failUnless(u.is_mutable())
889 self.failIf(u.is_readonly())
891 d.addCallback(_check_uri)
892 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
893 d.addCallback(lambda res:
894 self.failUnlessMutableChildContentsAre(self._foo_node,
896 self.NEWFILE_CONTENTS))
# Over-size mutable PUT must fail with 413 (SDMF single-segment limit).
899 def test_PUT_NEWFILEURL_mutable_toobig(self):
900 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
901 "413 Request Entity Too Large",
902 "SDMF is limited to one segment, and 10001 > 10000",
904 self.public_url + "/foo/new.txt?mutable=true",
905 "b" * (self.s.MUTABLE_SIZELIMIT+1))
# PUT over an existing child replaces its contents.
908 def test_PUT_NEWFILEURL_replace(self):
909 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
910 # TODO: we lose the response code, so we can't check this
911 #self.failUnlessReallyEqual(responsecode, 200)
912 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
913 d.addCallback(lambda res:
914 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
915 self.NEWFILE_CONTENTS))
# Unknown t= query argument on a file PUT yields 400 Bad Request.
918 def test_PUT_NEWFILEURL_bad_t(self):
919 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
920 "PUT to a file: bad t=bogus",
921 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
# replace=false on an existing child must refuse the overwrite.
925 def test_PUT_NEWFILEURL_no_replace(self):
926 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
927 self.NEWFILE_CONTENTS)
928 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
930 "There was already a child by that name, and you asked me "
# PUT into a missing intermediate directory creates it (mkdirs behavior).
934 def test_PUT_NEWFILEURL_mkdirs(self):
935 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
937 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
938 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
939 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
940 d.addCallback(lambda res:
941 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
942 self.NEWFILE_CONTENTS))
# A file occupying an intermediate path component blocks directory creation.
945 def test_PUT_NEWFILEURL_blocked(self):
946 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
947 self.NEWFILE_CONTENTS)
948 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
950 "Unable to create directory 'blockingfile': a file was in the way")
# Double-slash (empty path component) is rejected by the webapi.
953 def test_PUT_NEWFILEURL_emptyname(self):
954 # an empty pathname component (i.e. a double-slash) is disallowed
955 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
957 "The webapi does not allow empty pathname components",
958 self.PUT, self.public_url + "/foo//new.txt", "")
# NOTE(review): numbered listing with elided lines (e.g. the trailing
# `return d` of each test appears to be missing) -- not runnable as-is.
# DELETE of an existing file removes the directory entry.
961 def test_DELETE_FILEURL(self):
962 d = self.DELETE(self.public_url + "/foo/bar.txt")
963 d.addCallback(lambda res:
964 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
# DELETE of a missing child 404s.
967 def test_DELETE_FILEURL_missing(self):
968 d = self.DELETE(self.public_url + "/foo/missing")
969 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
# DELETE through a missing intermediate directory also 404s.
972 def test_DELETE_FILEURL_missing2(self):
973 d = self.DELETE(self.public_url + "/missing/missing")
974 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
def failUnlessHasBarDotTxtMetadata(self, res):
    """Assert that *res*, a JSON-encoded dirnode listing, carries tahoe
    link-time metadata for bar.txt, and that the recorded linkcrtime
    matches what we captured at fixture-setup time."""
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessIn("metadata", parsed[1])
    md = parsed[1]["metadata"]
    self.failUnlessIn("tahoe", md)
    # both link-creation and link-modification timestamps must be present
    for key in ("linkcrtime", "linkmotime"):
        self.failUnlessIn(key, md["tahoe"])
    self.failUnlessReallyEqual(md["tahoe"]["linkcrtime"],
                               self._bar_txt_metadata["tahoe"]["linkcrtime"])
# NOTE(review): numbered listing with elided lines (inner `def _check...`
# headers and `return d` lines are missing from the numbering) -- not
# runnable as-is.
# t=json on a file should return bar.txt's JSON including tahoe metadata.
987 def test_GET_FILEURL_json(self):
988 # twisted.web.http.parse_qs ignores any query args without an '=', so
989 # I can't do "GET /path?json", I have to do "GET /path/t=json"
990 # instead. This may make it tricky to emulate the S3 interface
992 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
994 self.failUnlessIsBarJSON(data)
995 self.failUnlessHasBarDotTxtMetadata(data)
997 d.addCallback(_check1)
# '?json' (no '=') is ignored per the comment above; a missing child 404s.
1000 def test_GET_FILEURL_json_missing(self):
1001 d = self.GET(self.public_url + "/foo/missing?json")
1002 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
# t=uri and t=readonly-uri on an immutable file both return the same cap.
1005 def test_GET_FILEURL_uri(self):
1006 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
1008 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1009 d.addCallback(_check)
1010 d.addCallback(lambda res:
1011 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
1013 # for now, for files, uris and readonly-uris are the same
1014 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1015 d.addCallback(_check2)
# Unknown t= on a GET of a file yields 400 Bad Request.
1018 def test_GET_FILEURL_badtype(self):
1019 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
1022 self.public_url + "/foo/bar.txt?t=bogus")
# The served stylesheet should contain the expected toolbar CSS rules.
1025 def test_CSS_FILE(self):
1026 d = self.GET("/tahoe_css", followRedirect=True)
1028 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
1029 self.failUnless(CSS_STYLE.search(res), res)
1030 d.addCallback(_check)
# t=uri on a missing child 404s.
1033 def test_GET_FILEURL_uri_missing(self):
1034 d = self.GET(self.public_url + "/foo/missing?t=uri")
1035 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
# The directory HTML page should carry the "Return to Welcome page" banner.
1038 def test_GET_DIRECTORY_html_banner(self):
1039 d = self.GET(self.public_url + "/foo", followRedirect=True)
1041 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>',res)
1042 d.addCallback(_check)
# NOTE(review): numbered listing with elided lines (gaps such as 1048, 1050,
# 1052, 1057, 1059, 1061 and the inner `def _check...` headers) -- not
# runnable as-is. The visible code exercises the directory HTML rendering:
# relative links, FILE/DIR rows, delete form, read-only and literal dirs.
1045 def test_GET_DIRURL(self):
1046 # the addSlash means we get a redirect here
1047 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
1049 d = self.GET(self.public_url + "/foo", followRedirect=True)
1051 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
1053 # the FILE reference points to a URI, but it should end in bar.txt
1054 bar_url = ("%s/file/%s/@@named=/bar.txt" %
1055 (ROOT, urllib.quote(self._bar_txt_uri)))
1056 get_bar = "".join([r'<td>FILE</td>',
1058 r'<a href="%s">bar.txt</a>' % bar_url,
1060 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
1062 self.failUnless(re.search(get_bar, res), res)
1063 for line in res.split("\n"):
1064 # find the line that contains the delete button for bar.txt
1065 if ("form action" in line and
1066 'value="delete"' in line and
1067 'value="bar.txt"' in line):
1068 # the form target should use a relative URL
1069 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1070 self.failUnless(('action="%s"' % foo_url) in line, line)
1071 # and the when_done= should too
1072 #done_url = urllib.quote(???)
1073 #self.failUnless(('name="when_done" value="%s"' % done_url)
1077 self.fail("unable to find delete-bar.txt line", res)
1079 # the DIR reference just points to a URI
1080 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1081 get_sub = ((r'<td>DIR</td>')
1082 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
1083 self.failUnless(re.search(get_sub, res), res)
1084 d.addCallback(_check)
1086 # look at a readonly directory
1087 d.addCallback(lambda res:
1088 self.GET(self.public_url + "/reedownlee", followRedirect=True))
1090 self.failUnless("(read-only)" in res, res)
1091 self.failIf("Upload a file" in res, res)
1092 d.addCallback(_check2)
1094 # and at a directory that contains a readonly directory
1095 d.addCallback(lambda res:
1096 self.GET(self.public_url, followRedirect=True))
1098 self.failUnless(re.search('<td>DIR-RO</td>'
1099 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1100 d.addCallback(_check3)
1102 # and an empty directory
1103 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1105 self.failUnless("directory is empty" in res, res)
1106 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1107 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1108 d.addCallback(_check4)
1110 # and at a literal directory
1111 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1112 d.addCallback(lambda res:
1113 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1115 self.failUnless('(immutable)' in res, res)
1116 self.failUnless(re.search('<td>FILE</td>'
1117 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1118 d.addCallback(_check5)
# NOTE(review): numbered listing with elided lines (gaps in numbering) --
# not runnable as-is.
# Unknown t= on a directory GET should produce an HTTP error.
1121 def test_GET_DIRURL_badtype(self):
1122 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1126 self.public_url + "/foo?t=bogus")
# t=json on a directory returns the foo dirnode JSON.
1129 def test_GET_DIRURL_json(self):
1130 d = self.GET(self.public_url + "/foo?t=json")
1131 d.addCallback(self.failUnlessIsFooJSON)
# start-manifest without ophandle= must be refused (slow operation).
1135 def test_POST_DIRURL_manifest_no_ophandle(self):
1136 d = self.shouldFail2(error.Error,
1137 "test_POST_DIRURL_manifest_no_ophandle",
1139 "slow operation requires ophandle=",
1140 self.POST, self.public_url, t="start-manifest")
# start-manifest with ophandle=125: poll the operation, then fetch results
# in html, text, and JSON output formats and check each rendering.
1143 def test_POST_DIRURL_manifest(self):
1144 d = defer.succeed(None)
1145 def getman(ignored, output):
1146 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1147 followRedirect=True)
1148 d.addCallback(self.wait_for_operation, "125")
1149 d.addCallback(self.get_operation_results, "125", output)
1151 d.addCallback(getman, None)
1152 def _got_html(manifest):
1153 self.failUnless("Manifest of SI=" in manifest)
1154 self.failUnless("<td>sub</td>" in manifest)
1155 self.failUnless(self._sub_uri in manifest)
1156 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1157 d.addCallback(_got_html)
1159 # both t=status and unadorned GET should be identical
1160 d.addCallback(lambda res: self.GET("/operations/125"))
1161 d.addCallback(_got_html)
1163 d.addCallback(getman, "html")
1164 d.addCallback(_got_html)
1165 d.addCallback(getman, "text")
1166 def _got_text(manifest):
1167 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1168 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1169 d.addCallback(_got_text)
1170 d.addCallback(getman, "JSON")
1172 data = res["manifest"]
1174 for (path_list, cap) in data:
1175 got[tuple(path_list)] = cap
1176 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1177 self.failUnless((u"sub",u"baz.txt") in got)
1178 self.failUnless("finished" in res)
1179 self.failUnless("origin" in res)
1180 self.failUnless("storage-index" in res)
1181 self.failUnless("verifycaps" in res)
1182 self.failUnless("stats" in res)
1183 d.addCallback(_got_json)
# NOTE(review): numbered listing with elided lines (gaps in numbering) --
# not runnable as-is.
# start-deep-size without ophandle= must be refused.
1186 def test_POST_DIRURL_deepsize_no_ophandle(self):
1187 d = self.shouldFail2(error.Error,
1188 "test_POST_DIRURL_deepsize_no_ophandle",
1190 "slow operation requires ophandle=",
1191 self.POST, self.public_url, t="start-deep-size")
# start-deep-size with ophandle=126: check both JSON and text result forms.
1194 def test_POST_DIRURL_deepsize(self):
1195 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1196 followRedirect=True)
1197 d.addCallback(self.wait_for_operation, "126")
1198 d.addCallback(self.get_operation_results, "126", "json")
1199 def _got_json(data):
1200 self.failUnlessReallyEqual(data["finished"], True)
1202 self.failUnless(size > 1000)
1203 d.addCallback(_got_json)
1204 d.addCallback(self.get_operation_results, "126", "text")
1206 mo = re.search(r'^size: (\d+)$', res, re.M)
1207 self.failUnless(mo, res)
1208 size = int(mo.group(1))
1209 # with directories, the size varies.
1210 self.failUnless(size > 1000)
1211 d.addCallback(_got_text)
# start-deep-stats without ophandle= must be refused.
1214 def test_POST_DIRURL_deepstats_no_ophandle(self):
1215 d = self.shouldFail2(error.Error,
1216 "test_POST_DIRURL_deepstats_no_ophandle",
1218 "slow operation requires ophandle=",
1219 self.POST, self.public_url, t="start-deep-stats")
# start-deep-stats with ophandle=127: compare stats counters against
# the known fixture contents (sizes that vary are left commented out).
1222 def test_POST_DIRURL_deepstats(self):
1223 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1224 followRedirect=True)
1225 d.addCallback(self.wait_for_operation, "127")
1226 d.addCallback(self.get_operation_results, "127", "json")
1227 def _got_json(stats):
1228 expected = {"count-immutable-files": 3,
1229 "count-mutable-files": 0,
1230 "count-literal-files": 0,
1232 "count-directories": 3,
1233 "size-immutable-files": 57,
1234 "size-literal-files": 0,
1235 #"size-directories": 1912, # varies
1236 #"largest-directory": 1590,
1237 "largest-directory-children": 5,
1238 "largest-immutable-file": 19,
1240 for k,v in expected.iteritems():
1241 self.failUnlessReallyEqual(stats[k], v,
1242 "stats[%s] was %s, not %s" %
1244 self.failUnlessReallyEqual(stats["size-files-histogram"],
1246 d.addCallback(_got_json)
# NOTE(review): numbered listing with elided lines (gaps in numbering) --
# not runnable as-is.
# t=stream-manifest: newline-delimited JSON units ending in a "stats" unit;
# the fixture yields 7 units, the first being the foo directory itself.
1249 def test_POST_DIRURL_stream_manifest(self):
1250 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1252 self.failUnless(res.endswith("\n"))
1253 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1254 self.failUnlessReallyEqual(len(units), 7)
1255 self.failUnlessEqual(units[-1]["type"], "stats")
1257 self.failUnlessEqual(first["path"], [])
1258 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1259 self.failUnlessEqual(first["type"], "directory")
1260 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1261 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1262 self.failIfEqual(baz["storage-index"], None)
1263 self.failIfEqual(baz["verifycap"], None)
1264 self.failIfEqual(baz["repaircap"], None)
1266 d.addCallback(_check)
# t=uri on a directory returns the directory's write cap.
1269 def test_GET_DIRURL_uri(self):
1270 d = self.GET(self.public_url + "/foo?t=uri")
1272 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1273 d.addCallback(_check)
# t=readonly-uri on a directory returns the read-only cap.
1276 def test_GET_DIRURL_readonly_uri(self):
1277 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1279 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1280 d.addCallback(_check)
# PUT ?t=mkdir creates an empty child directory.
1283 def test_PUT_NEWDIRURL(self):
1284 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1285 d.addCallback(lambda res:
1286 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1287 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1288 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST ?t=mkdir does the same via POST2.
1291 def test_POST_NEWDIRURL(self):
1292 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1293 d.addCallback(lambda res:
1294 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1295 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1296 d.addCallback(self.failUnlessNodeKeysAre, [])
# NOTE(review): numbered listing with elided lines (gaps in numbering) --
# not runnable as-is.
# Double-slash in a mkdir POST path is rejected.
1299 def test_POST_NEWDIRURL_emptyname(self):
1300 # an empty pathname component (i.e. a double-slash) is disallowed
1301 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1303 "The webapi does not allow empty pathname components, i.e. a double slash",
1304 self.POST, self.public_url + "//?t=mkdir")
# t=mkdir-with-children: the new directory must contain every child
# (immutable, mutable, unknown-cap, and sub-directory) passed in the
# JSON body, with the right RW/RO cap for each.
1307 def test_POST_NEWDIRURL_initial_children(self):
1308 (newkids, caps) = self._create_initial_children()
1309 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1310 simplejson.dumps(newkids))
1312 n = self.s.create_node_from_uri(uri.strip())
1313 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1314 d2.addCallback(lambda ign:
1315 self.failUnlessROChildURIIs(n, u"child-imm",
1317 d2.addCallback(lambda ign:
1318 self.failUnlessRWChildURIIs(n, u"child-mutable",
1320 d2.addCallback(lambda ign:
1321 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1323 d2.addCallback(lambda ign:
1324 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1325 caps['unknown_rocap']))
1326 d2.addCallback(lambda ign:
1327 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1328 caps['unknown_rwcap']))
1329 d2.addCallback(lambda ign:
1330 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1331 caps['unknown_immcap']))
1332 d2.addCallback(lambda ign:
1333 self.failUnlessRWChildURIIs(n, u"dirchild",
1335 d2.addCallback(lambda ign:
1336 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1338 d2.addCallback(lambda ign:
1339 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1340 caps['emptydircap']))
1342 d.addCallback(_check)
1343 d.addCallback(lambda res:
1344 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1345 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1346 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1347 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1348 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# t=mkdir-immutable: same shape, but only immutable children are allowed
# and every child cap must be read-only.
1351 def test_POST_NEWDIRURL_immutable(self):
1352 (newkids, caps) = self._create_immutable_children()
1353 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1354 simplejson.dumps(newkids))
1356 n = self.s.create_node_from_uri(uri.strip())
1357 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1358 d2.addCallback(lambda ign:
1359 self.failUnlessROChildURIIs(n, u"child-imm",
1361 d2.addCallback(lambda ign:
1362 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1363 caps['unknown_immcap']))
1364 d2.addCallback(lambda ign:
1365 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1367 d2.addCallback(lambda ign:
1368 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1370 d2.addCallback(lambda ign:
1371 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1372 caps['emptydircap']))
1374 d.addCallback(_check)
1375 d.addCallback(lambda res:
1376 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1377 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1378 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1379 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1380 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1381 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1382 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1383 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1384 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1385 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1386 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1387 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1388 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1389 d.addErrback(self.explain_web_error)
# Passing mutable children to t=mkdir-immutable must be rejected.
1392 def test_POST_NEWDIRURL_immutable_bad(self):
1393 (newkids, caps) = self._create_initial_children()
1394 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1396 "needed to be immutable but was not",
1398 self.public_url + "/foo/newdir?t=mkdir-immutable",
1399 simplejson.dumps(newkids))
# NOTE(review): numbered listing with elided lines (gaps in numbering) --
# not runnable as-is.
# mkdir over an existing directory is a no-op: 'sub' keeps its contents.
1402 def test_PUT_NEWDIRURL_exists(self):
1403 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1404 d.addCallback(lambda res:
1405 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1406 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1407 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir beneath a file must 409; existing 'sub' stays intact.
1410 def test_PUT_NEWDIRURL_blocked(self):
1411 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1412 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1414 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1415 d.addCallback(lambda res:
1416 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1417 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1418 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p&path=... creates intermediate dirs and is idempotent
# (a second call should return the same sub-sub URI).
1421 def test_PUT_NEWDIRURL_mkdir_p(self):
1422 d = defer.succeed(None)
1423 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1424 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1425 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1426 def mkdir_p(mkpnode):
1427 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1429 def made_subsub(ssuri):
1430 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1431 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
1433 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1435 d.addCallback(made_subsub)
1437 d.addCallback(mkdir_p)
# mkdir through a missing intermediate creates the chain but not a
# direct 'newdir' child of foo.
1440 def test_PUT_NEWDIRURL_mkdirs(self):
1441 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1442 d.addCallback(lambda res:
1443 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1444 d.addCallback(lambda res:
1445 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1446 d.addCallback(lambda res:
1447 self._foo_node.get_child_at_path(u"subdir/newdir"))
1448 d.addCallback(self.failUnlessNodeKeysAre, [])
# DELETE of a directory unlinks it from its parent.
1451 def test_DELETE_DIRURL(self):
1452 d = self.DELETE(self.public_url + "/foo")
1453 d.addCallback(lambda res:
1454 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE of a missing child 404s and leaves 'foo' alone.
1457 def test_DELETE_DIRURL_missing(self):
1458 d = self.DELETE(self.public_url + "/foo/missing")
1459 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1460 d.addCallback(lambda res:
1461 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE of a missing top-level child 404s.
1464 def test_DELETE_DIRURL_missing2(self):
1465 d = self.DELETE(self.public_url + "/missing")
1466 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# NOTE(review): numbered listing with elided lines (e.g. the `d = node.list()`
# lines and `return d` appear to be missing from the numbering) -- not
# runnable as-is.
# Debug helper: walk the public root, visiting every child (body elided).
1469 def dump_root(self):
1471 w = webish.DirnodeWalkerMixin()
1472 def visitor(childpath, childnode, metadata):
1474 d = w.walk(self.public_root, visitor)
# Assert the node's listing has exactly the given (unicode) child names.
1477 def failUnlessNodeKeysAre(self, node, expected_keys):
1478 for k in expected_keys:
1479 assert isinstance(k, unicode)
1481 def _check(children):
1482 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1483 d.addCallback(_check)
# Assert the node has a child with the given (unicode) name.
1485 def failUnlessNodeHasChild(self, node, name):
1486 assert isinstance(name, unicode)
1488 def _check(children):
1489 self.failUnless(name in children)
1490 d.addCallback(_check)
# Assert the node has no child with the given (unicode) name.
1492 def failIfNodeHasChild(self, node, name):
1493 assert isinstance(name, unicode)
1495 def _check(children):
1496 self.failIf(name in children)
1497 d.addCallback(_check)
# NOTE(review): numbered listing with elided lines (`def _check(child):`
# headers and `return d` are missing from the numbering) -- not runnable
# as-is.
# Download the named immutable child and compare its bytes.
1500 def failUnlessChildContentsAre(self, node, name, expected_contents):
1501 assert isinstance(name, unicode)
1502 d = node.get_child_at_path(name)
1503 d.addCallback(lambda node: download_to_data(node))
1504 def _check(contents):
1505 self.failUnlessReallyEqual(contents, expected_contents)
1506 d.addCallback(_check)
# Same, but via download_best_version() for a mutable child.
1509 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1510 assert isinstance(name, unicode)
1511 d = node.get_child_at_path(name)
1512 d.addCallback(lambda node: node.download_best_version())
1513 def _check(contents):
1514 self.failUnlessReallyEqual(contents, expected_contents)
1515 d.addCallback(_check)
# Assert the named child is writeable and its RW/RO caps match expected_uri.
1518 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1519 assert isinstance(name, unicode)
1520 d = node.get_child_at_path(name)
1522 self.failUnless(child.is_unknown() or not child.is_readonly())
1523 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1524 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1525 expected_ro_uri = self._make_readonly(expected_uri)
1527 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1528 d.addCallback(_check)
# Assert the named child is read-only (no write cap) with the expected cap.
1531 def failUnlessROChildURIIs(self, node, name, expected_uri):
1532 assert isinstance(name, unicode)
1533 d = node.get_child_at_path(name)
1535 self.failUnless(child.is_unknown() or child.is_readonly())
1536 self.failUnlessReallyEqual(child.get_write_uri(), None)
1537 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1538 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1539 d.addCallback(_check)
# Callback form: compare a URI returned by the webapi with the RW child's caps.
1542 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1543 assert isinstance(name, unicode)
1544 d = node.get_child_at_path(name)
1546 self.failUnless(child.is_unknown() or not child.is_readonly())
1547 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1548 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1549 expected_ro_uri = self._make_readonly(got_uri)
1551 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1552 d.addCallback(_check)
# Callback form: compare a returned URI with the RO child's caps.
1555 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1556 assert isinstance(name, unicode)
1557 d = node.get_child_at_path(name)
1559 self.failUnless(child.is_unknown() or child.is_readonly())
1560 self.failUnlessReallyEqual(child.get_write_uri(), None)
1561 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1562 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1563 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake CHK store maps *got_uri* to exactly *contents*."""
    stored = FakeCHKFileNode.all_contents[got_uri]
    self.failUnless(stored == contents)
# NOTE(review): numbered listing with elided lines (gaps in numbering, e.g.
# the `fn = self._foo_node` lines and `return d`) -- not runnable as-is.
# t=upload into a directory links the new file and stores its contents.
1569 def test_POST_upload(self):
1570 d = self.POST(self.public_url + "/foo", t="upload",
1571 file=("new.txt", self.NEWFILE_CONTENTS))
1573 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1574 d.addCallback(lambda res:
1575 self.failUnlessChildContentsAre(fn, u"new.txt",
1576 self.NEWFILE_CONTENTS))
# Same upload with a non-ASCII filename; retrieval via UTF-8-encoded URL.
1579 def test_POST_upload_unicode(self):
1580 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1581 d = self.POST(self.public_url + "/foo", t="upload",
1582 file=(filename, self.NEWFILE_CONTENTS))
1584 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1585 d.addCallback(lambda res:
1586 self.failUnlessChildContentsAre(fn, filename,
1587 self.NEWFILE_CONTENTS))
1588 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1589 d.addCallback(lambda res: self.GET(target_url))
1590 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1591 self.NEWFILE_CONTENTS,
# name= (elided here, presumably on line 1598) overrides the form filename.
1595 def test_POST_upload_unicode_named(self):
1596 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1597 d = self.POST(self.public_url + "/foo", t="upload",
1599 file=("overridden", self.NEWFILE_CONTENTS))
1601 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1602 d.addCallback(lambda res:
1603 self.failUnlessChildContentsAre(fn, filename,
1604 self.NEWFILE_CONTENTS))
1605 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1606 d.addCallback(lambda res: self.GET(target_url))
1607 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1608 self.NEWFILE_CONTENTS,
# Unlinked upload to /uri: scrape the URI out of the results page and
# verify the stored contents.
1612 def test_POST_upload_no_link(self):
1613 d = self.POST("/uri", t="upload",
1614 file=("new.txt", self.NEWFILE_CONTENTS))
1615 def _check_upload_results(page):
1616 # this should be a page which describes the results of the upload
1617 # that just finished.
1618 self.failUnless("Upload Results:" in page)
1619 self.failUnless("URI:" in page)
1620 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1621 mo = uri_re.search(page)
1622 self.failUnless(mo, page)
1623 new_uri = mo.group(1)
1625 d.addCallback(_check_upload_results)
1626 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done= on an unlinked upload should redirect.
1629 def test_POST_upload_no_link_whendone(self):
1630 d = self.POST("/uri", t="upload", when_done="/",
1631 file=("new.txt", self.NEWFILE_CONTENTS))
1632 d.addBoth(self.shouldRedirect, "/")
# NOTE(review): numbered listing with elided lines (gaps in numbering) --
# not runnable as-is.
# Helper: run *callable*, trap the expected PageRedirect, and hand the
# status code and Location target to *checker*; fail if no redirect happened.
1635 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1636 d = defer.maybeDeferred(callable, *args, **kwargs)
1638 if isinstance(res, failure.Failure):
1639 res.trap(error.PageRedirect)
1640 statuscode = res.value.status
1641 target = res.value.location
1642 return checker(statuscode, target)
1643 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done=/uri/%(uri)s: follow the redirect and fetch the new file back.
1648 def test_POST_upload_no_link_whendone_results(self):
1649 def check(statuscode, target):
1650 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1651 self.failUnless(target.startswith(self.webish_url), target)
1652 return client.getPage(target, method="GET")
1653 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1655 self.POST, "/uri", t="upload",
1656 when_done="/uri/%(uri)s",
1657 file=("new.txt", self.NEWFILE_CONTENTS))
1658 d.addCallback(lambda res:
1659 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))
# Unlinked mutable upload: response body is an SSK writecap; the file is
# then retrievable via /uri/<cap> and /file/<cap>.
1662 def test_POST_upload_no_link_mutable(self):
1663 d = self.POST("/uri", t="upload", mutable="true",
1664 file=("new.txt", self.NEWFILE_CONTENTS))
1665 def _check(filecap):
1666 filecap = filecap.strip()
1667 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1668 self.filecap = filecap
1669 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1670 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1671 n = self.s.create_node_from_uri(filecap)
1672 return n.download_best_version()
1673 d.addCallback(_check)
1675 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1676 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1677 d.addCallback(_check2)
1679 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1680 return self.GET("/file/%s" % urllib.quote(self.filecap))
1681 d.addCallback(_check3)
1683 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1684 d.addCallback(_check4)
# Over-size unlinked mutable upload must fail with 413.
1687 def test_POST_upload_no_link_mutable_toobig(self):
1688 d = self.shouldFail2(error.Error,
1689 "test_POST_upload_no_link_mutable_toobig",
1690 "413 Request Entity Too Large",
1691 "SDMF is limited to one segment, and 10001 > 10000",
1693 "/uri", t="upload", mutable="true",
1695 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# NOTE(review): numbered listing with elided lines (gaps in numbering; the
# `def _got(...)` headers and several statements are missing) -- not
# runnable as-is. This long test uploads a mutable file, re-uploads it via
# POST and PUT checking the URI stays stable, then inspects the HTML and
# JSON directory/file views, t=uri/t=readonly-uri, /uri/<cap> access,
# HEAD headers, and the 413 over-size error path.
1698 def test_POST_upload_mutable(self):
1699 # this creates a mutable file
1700 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1701 file=("new.txt", self.NEWFILE_CONTENTS))
1703 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1704 d.addCallback(lambda res:
1705 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1706 self.NEWFILE_CONTENTS))
1707 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1709 self.failUnless(IMutableFileNode.providedBy(newnode))
1710 self.failUnless(newnode.is_mutable())
1711 self.failIf(newnode.is_readonly())
1712 self._mutable_node = newnode
1713 self._mutable_uri = newnode.get_uri()
1716 # now upload it again and make sure that the URI doesn't change
1717 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1718 d.addCallback(lambda res:
1719 self.POST(self.public_url + "/foo", t="upload",
1721 file=("new.txt", NEWER_CONTENTS)))
1722 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1723 d.addCallback(lambda res:
1724 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1726 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1728 self.failUnless(IMutableFileNode.providedBy(newnode))
1729 self.failUnless(newnode.is_mutable())
1730 self.failIf(newnode.is_readonly())
1731 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1732 d.addCallback(_got2)
1734 # upload a second time, using PUT instead of POST
1735 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1736 d.addCallback(lambda res:
1737 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1738 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1739 d.addCallback(lambda res:
1740 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1743 # finally list the directory, since mutable files are displayed
1744 # slightly differently
1746 d.addCallback(lambda res:
1747 self.GET(self.public_url + "/foo/",
1748 followRedirect=True))
1749 def _check_page(res):
1750 # TODO: assert more about the contents
1751 self.failUnless("SSK" in res)
1753 d.addCallback(_check_page)
1755 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1757 self.failUnless(IMutableFileNode.providedBy(newnode))
1758 self.failUnless(newnode.is_mutable())
1759 self.failIf(newnode.is_readonly())
1760 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1761 d.addCallback(_got3)
1763 # look at the JSON form of the enclosing directory
1764 d.addCallback(lambda res:
1765 self.GET(self.public_url + "/foo/?t=json",
1766 followRedirect=True))
1767 def _check_page_json(res):
1768 parsed = simplejson.loads(res)
1769 self.failUnlessEqual(parsed[0], "dirnode")
1770 children = dict( [(unicode(name),value)
1772 in parsed[1]["children"].iteritems()] )
1773 self.failUnless(u"new.txt" in children)
1774 new_json = children[u"new.txt"]
1775 self.failUnlessEqual(new_json[0], "filenode")
1776 self.failUnless(new_json[1]["mutable"])
1777 self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
1778 ro_uri = self._mutable_node.get_readonly().to_string()
1779 self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
1780 d.addCallback(_check_page_json)
1782 # and the JSON form of the file
1783 d.addCallback(lambda res:
1784 self.GET(self.public_url + "/foo/new.txt?t=json"))
1785 def _check_file_json(res):
1786 parsed = simplejson.loads(res)
1787 self.failUnlessEqual(parsed[0], "filenode")
1788 self.failUnless(parsed[1]["mutable"])
1789 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
1790 ro_uri = self._mutable_node.get_readonly().to_string()
1791 self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
1792 d.addCallback(_check_file_json)
1794 # and look at t=uri and t=readonly-uri
1795 d.addCallback(lambda res:
1796 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1797 d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
1798 d.addCallback(lambda res:
1799 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1800 def _check_ro_uri(res):
1801 ro_uri = self._mutable_node.get_readonly().to_string()
1802 self.failUnlessReallyEqual(res, ro_uri)
1803 d.addCallback(_check_ro_uri)
1805 # make sure we can get to it from /uri/URI
1806 d.addCallback(lambda res:
1807 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1808 d.addCallback(lambda res:
1809 self.failUnlessReallyEqual(res, NEW2_CONTENTS))
1811 # and that HEAD computes the size correctly
1812 d.addCallback(lambda res:
1813 self.HEAD(self.public_url + "/foo/new.txt",
1814 return_response=True))
1815 def _got_headers((res, status, headers)):
1816 self.failUnlessReallyEqual(res, "")
1817 self.failUnlessReallyEqual(headers["content-length"][0],
1818 str(len(NEW2_CONTENTS)))
1819 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
1820 d.addCallback(_got_headers)
1822 # make sure that size errors are displayed correctly for overwrite
1823 d.addCallback(lambda res:
1824 self.shouldFail2(error.Error,
1825 "test_POST_upload_mutable-toobig",
1826 "413 Request Entity Too Large",
1827 "SDMF is limited to one segment, and 10001 > 10000",
1829 self.public_url + "/foo", t="upload",
1832 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1835 d.addErrback(self.dump_error)
1838 def test_POST_upload_mutable_toobig(self):
# A fresh t=upload with mutable="true" whose body is one byte over
# MUTABLE_SIZELIMIT must be rejected with 413 (SDMF = one segment).
# NOTE(review): the embedded numbering skips 1843/1846/1848+, so part of
# this method (e.g. the self.POST reference and a trailing 'return d')
# is not visible in this listing -- confirm against the full source.
1839 d = self.shouldFail2(error.Error,
1840 "test_POST_upload_mutable_toobig",
1841 "413 Request Entity Too Large",
1842 "SDMF is limited to one segment, and 10001 > 10000",
1844 self.public_url + "/foo",
1845 t="upload", mutable="true",
1847 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1850 def dump_error(self, f):
# Debugging errback: prints the HTTP response body attached to a
# web error.Error Failure so trial's ERROR output is informative.
1851 # if the web server returns an error code (like 400 Bad Request),
1852 # web.client.getPage puts the HTTP response body into the .response
1853 # attribute of the exception object that it gives back. It does not
1854 # appear in the Failure's repr(), so the ERROR that trial displays
1855 # will be rather terse and unhelpful. addErrback this method to the
1856 # end of your chain to get more information out of these errors.
1857 if f.check(error.Error):
1858 print "web.error.Error:"
# NOTE(review): line 1859 and the method tail (presumably 'return f',
# which would keep the Failure propagating) are elided in this listing.
1860 print f.value.response
1863 def test_POST_upload_replace(self):
# t=upload over an existing child name replaces the child by default.
# NOTE(review): line 1866 (which evidently binds 'fn', presumably the
# target dirnode) and the trailing 'return d' are elided here.
1864 d = self.POST(self.public_url + "/foo", t="upload",
1865 file=("bar.txt", self.NEWFILE_CONTENTS))
1867 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1868 d.addCallback(lambda res:
1869 self.failUnlessChildContentsAre(fn, u"bar.txt",
1870 self.NEWFILE_CONTENTS))
1873 def test_POST_upload_no_replace_ok(self):
# replace=false succeeds when the child name is not already present;
# the new file must then be readable via GET.
1874 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1875 file=("new.txt", self.NEWFILE_CONTENTS))
1876 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1877 d.addCallback(lambda res: self.failUnlessReallyEqual(res,
1878 self.NEWFILE_CONTENTS))
1881 def test_POST_upload_no_replace_queryarg(self):
# replace=false as a query arg + an existing child name must raise an
# error, leaving the original bar.txt untouched.
# NOTE(review): line 1886 (the expected HTTP status string, presumably
# "409 Conflict") is elided from this listing.
1882 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1883 file=("bar.txt", self.NEWFILE_CONTENTS))
1884 d.addBoth(self.shouldFail, error.Error,
1885 "POST_upload_no_replace_queryarg",
1887 "There was already a child by that name, and you asked me "
1888 "to not replace it")
1889 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1890 d.addCallback(self.failUnlessIsBarDotTxt)
1893 def test_POST_upload_no_replace_field(self):
# Same as the queryarg variant, but replace="false" as a form field.
# NOTE(review): line 1897 (expected HTTP status) is elided here.
1894 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1895 file=("bar.txt", self.NEWFILE_CONTENTS))
1896 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1898 "There was already a child by that name, and you asked me "
1899 "to not replace it")
1900 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1901 d.addCallback(self.failUnlessIsBarDotTxt)
1904 def test_POST_upload_whendone(self):
# when_done=/THERE should redirect after the upload completes, and the
# child must still have been linked.
# NOTE(review): line 1908 (binding 'fn') and 'return d' are elided.
1905 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1906 file=("new.txt", self.NEWFILE_CONTENTS))
1907 d.addBoth(self.shouldRedirect, "/THERE")
1909 d.addCallback(lambda res:
1910 self.failUnlessChildContentsAre(fn, u"new.txt",
1911 self.NEWFILE_CONTENTS))
1914 def test_POST_upload_named(self):
# name= field supplies the child name instead of the file-field name.
# NOTE(review): line 1915 (binding 'fn') and 'return d' are elided.
1916 d = self.POST(self.public_url + "/foo", t="upload",
1917 name="new.txt", file=self.NEWFILE_CONTENTS)
1918 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1919 d.addCallback(lambda res:
1920 self.failUnlessChildContentsAre(fn, u"new.txt",
1921 self.NEWFILE_CONTENTS))
1924 def test_POST_upload_named_badfilename(self):
# A name= containing slashes must be rejected and must not create any
# child in the directory.
# NOTE(review): lines 1929/1931/1937-1939 (expected status, call tail,
# remainder of the expected-children list, 'return d') are elided.
1925 d = self.POST(self.public_url + "/foo", t="upload",
1926 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1927 d.addBoth(self.shouldFail, error.Error,
1928 "test_POST_upload_named_badfilename",
1930 "name= may not contain a slash",
1932 # make sure that nothing was added
1933 d.addCallback(lambda res:
1934 self.failUnlessNodeKeysAre(self._foo_node,
1935 [u"bar.txt", u"blockingfile",
1936 u"empty", u"n\u00fc.txt",
1940 def test_POST_FILEURL_check(self):
# t=check on a file: verify the HTML report, the when_done redirect,
# the return_to link, and the JSON output form.
# NOTE(review): the 'def _check(res):' (1943), the shouldRedirect2
# argument lines (1952-1954), 'def _check3(res):' (1958), and the
# trailing 'return d' are elided from this listing.
1941 bar_url = self.public_url + "/foo/bar.txt"
1942 d = self.POST(bar_url, t="check")
1944 self.failUnless("Healthy :" in res)
1945 d.addCallback(_check)
1946 redir_url = "http://allmydata.org/TARGET"
1947 def _check2(statuscode, target):
1948 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1949 self.failUnlessReallyEqual(target, redir_url)
1950 d.addCallback(lambda res:
1951 self.shouldRedirect2("test_POST_FILEURL_check",
1955 when_done=redir_url))
1956 d.addCallback(lambda res:
1957 self.POST(bar_url, t="check", return_to=redir_url))
1959 self.failUnless("Healthy :" in res)
1960 self.failUnless("Return to file" in res)
1961 self.failUnless(redir_url in res)
1962 d.addCallback(_check3)
1964 d.addCallback(lambda res:
1965 self.POST(bar_url, t="check", output="JSON"))
1966 def _check_json(res):
1967 data = simplejson.loads(res)
1968 self.failUnless("storage-index" in data)
1969 self.failUnless(data["results"]["healthy"])
1970 d.addCallback(_check_json)
1974 def test_POST_FILEURL_check_and_repair(self):
# t=check&repair=true on a file: HTML report, when_done redirect, and
# return_to link.
# NOTE(review): 'def _check(res):' (1977), shouldRedirect2 argument
# lines (1986-1987), 'def _check3(res):' (1992), and 'return d' are
# elided from this listing.
1975 bar_url = self.public_url + "/foo/bar.txt"
1976 d = self.POST(bar_url, t="check", repair="true")
1978 self.failUnless("Healthy :" in res)
1979 d.addCallback(_check)
1980 redir_url = "http://allmydata.org/TARGET"
1981 def _check2(statuscode, target):
1982 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1983 self.failUnlessReallyEqual(target, redir_url)
1984 d.addCallback(lambda res:
1985 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1988 t="check", repair="true",
1989 when_done=redir_url))
1990 d.addCallback(lambda res:
1991 self.POST(bar_url, t="check", return_to=redir_url))
1993 self.failUnless("Healthy :" in res)
1994 self.failUnless("Return to file" in res)
1995 self.failUnless(redir_url in res)
1996 d.addCallback(_check3)
1999 def test_POST_DIRURL_check(self):
# t=check on a directory: HTML report, when_done redirect, return_to
# link, and JSON output form.
# NOTE(review): helper 'def' lines and some argument continuations
# (2002, 2011-2013, 2017, 2022, trailing 'return d') are elided.
2000 foo_url = self.public_url + "/foo/"
2001 d = self.POST(foo_url, t="check")
2003 self.failUnless("Healthy :" in res, res)
2004 d.addCallback(_check)
2005 redir_url = "http://allmydata.org/TARGET"
2006 def _check2(statuscode, target):
2007 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2008 self.failUnlessReallyEqual(target, redir_url)
2009 d.addCallback(lambda res:
2010 self.shouldRedirect2("test_POST_DIRURL_check",
2014 when_done=redir_url))
2015 d.addCallback(lambda res:
2016 self.POST(foo_url, t="check", return_to=redir_url))
2018 self.failUnless("Healthy :" in res, res)
2019 self.failUnless("Return to file/directory" in res)
2020 self.failUnless(redir_url in res)
2021 d.addCallback(_check3)
2023 d.addCallback(lambda res:
2024 self.POST(foo_url, t="check", output="JSON"))
2025 def _check_json(res):
2026 data = simplejson.loads(res)
2027 self.failUnless("storage-index" in data)
2028 self.failUnless(data["results"]["healthy"])
2029 d.addCallback(_check_json)
2033 def test_POST_DIRURL_check_and_repair(self):
# t=check&repair=true on a directory: HTML report, when_done redirect,
# and return_to link.
# NOTE(review): helper 'def' lines and argument continuations (2036,
# 2045-2046, 2051, trailing 'return d') are elided from this listing.
2034 foo_url = self.public_url + "/foo/"
2035 d = self.POST(foo_url, t="check", repair="true")
2037 self.failUnless("Healthy :" in res, res)
2038 d.addCallback(_check)
2039 redir_url = "http://allmydata.org/TARGET"
2040 def _check2(statuscode, target):
2041 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2042 self.failUnlessReallyEqual(target, redir_url)
2043 d.addCallback(lambda res:
2044 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
2047 t="check", repair="true",
2048 when_done=redir_url))
2049 d.addCallback(lambda res:
2050 self.POST(foo_url, t="check", return_to=redir_url))
2052 self.failUnless("Healthy :" in res)
2053 self.failUnless("Return to file/directory" in res)
2054 self.failUnless(redir_url in res)
2055 d.addCallback(_check3)
2058 def wait_for_operation(self, ignored, ophandle):
# Poll /operations/<ophandle>?t=status&output=JSON, re-scheduling
# itself (via self.stall) every 1.0s until the operation reports
# finished; the Deferred presumably fires with the decoded dict.
# NOTE(review): lines 2061-2062 (the GET and the callback 'def') and
# 2067-2071 (the finished branch and 'return d') are elided here.
2059 url = "/operations/" + ophandle
2060 url += "?t=status&output=JSON"
2063 data = simplejson.loads(res)
2064 if not data["finished"]:
2065 d = self.stall(delay=1.0)
2066 d.addCallback(self.wait_for_operation, ophandle)
2072 def get_operation_results(self, ignored, ophandle, output=None):
# Fetch the results page for an operation handle; when output=json
# (case-insensitive) the body is decoded with simplejson first.
# NOTE(review): lines 2074-2075, 2077-2078, and 2081-2084 (URL query
# construction, the GET, and the non-JSON return path) are elided.
2073 url = "/operations/" + ophandle
2076 url += "&output=" + output
2079 if output and output.lower() == "json":
2080 return simplejson.loads(res)
2085 def test_POST_DIRURL_deepcheck_no_ophandle(self):
# t=start-deep-check without an ophandle= must be rejected.
# NOTE(review): line 2088 (expected HTTP status, presumably
# "400 Bad Request") and the trailing 'return d' are elided.
2086 d = self.shouldFail2(error.Error,
2087 "test_POST_DIRURL_deepcheck_no_ophandle",
2089 "slow operation requires ophandle=",
2090 self.POST, self.public_url, t="start-deep-check")
2093 def test_POST_DIRURL_deepcheck(self):
# Full deep-check cycle: start with ophandle=123, expect a redirect to
# /operations/123, poll to completion, then verify the JSON summary,
# the HTML rendering (with and without trailing slash), the 404 for a
# bogus per-SI page, and the per-SI JSON detail page.
# NOTE(review): blank/elided lines at 2111/2115/2120 and the trailing
# 'return d' (2130-2131) are not visible in this listing.
2094 def _check_redirect(statuscode, target):
2095 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2096 self.failUnless(target.endswith("/operations/123"))
2097 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
2098 self.POST, self.public_url,
2099 t="start-deep-check", ophandle="123")
2100 d.addCallback(self.wait_for_operation, "123")
2101 def _check_json(data):
2102 self.failUnlessReallyEqual(data["finished"], True)
2103 self.failUnlessReallyEqual(data["count-objects-checked"], 8)
2104 self.failUnlessReallyEqual(data["count-objects-healthy"], 8)
2105 d.addCallback(_check_json)
2106 d.addCallback(self.get_operation_results, "123", "html")
2107 def _check_html(res):
2108 self.failUnless("Objects Checked: <span>8</span>" in res)
2109 self.failUnless("Objects Healthy: <span>8</span>" in res)
2110 d.addCallback(_check_html)
2112 d.addCallback(lambda res:
2113 self.GET("/operations/123/"))
2114 d.addCallback(_check_html) # should be the same as without the slash
2116 d.addCallback(lambda res:
2117 self.shouldFail2(error.Error, "one", "404 Not Found",
2118 "No detailed results for SI bogus",
2119 self.GET, "/operations/123/bogus"))
2121 foo_si = self._foo_node.get_storage_index()
2122 foo_si_s = base32.b2a(foo_si)
2123 d.addCallback(lambda res:
2124 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2125 def _check_foo_json(res):
2126 data = simplejson.loads(res)
2127 self.failUnlessEqual(data["storage-index"], foo_si_s)
2128 self.failUnless(data["results"]["healthy"])
2129 d.addCallback(_check_foo_json)
2132 def test_POST_DIRURL_deepcheck_and_repair(self):
# Deep-check with repair=true on an already-healthy grid: all 8
# objects healthy before and after, zero repairs attempted; verify
# both the JSON summary and the HTML rendering.
# NOTE(review): blank/elided lines at 2152/2156/2160 and the trailing
# 'return d' (2165-2166) are not visible in this listing.
2133 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2134 ophandle="124", output="json", followRedirect=True)
2135 d.addCallback(self.wait_for_operation, "124")
2136 def _check_json(data):
2137 self.failUnlessReallyEqual(data["finished"], True)
2138 self.failUnlessReallyEqual(data["count-objects-checked"], 8)
2139 self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 8)
2140 self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
2141 self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
2142 self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
2143 self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
2144 self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
2145 self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 8)
2146 self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
2147 self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
2148 d.addCallback(_check_json)
2149 d.addCallback(self.get_operation_results, "124", "html")
2150 def _check_html(res):
2151 self.failUnless("Objects Checked: <span>8</span>" in res)
2153 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2154 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2155 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2157 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2158 self.failUnless("Repairs Successful: <span>0</span>" in res)
2159 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2161 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2162 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2163 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2164 d.addCallback(_check_html)
2167 def test_POST_FILEURL_bad_t(self):
# POST to a file with an unrecognized t= must yield 400 Bad Request.
# NOTE(review): lines 2171-2173 (the t="bogus" kwarg and 'return d')
# are elided from this listing.
2168 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2169 "POST to file: bad t=bogus",
2170 self.POST, self.public_url + "/foo/bar.txt",
2174 def test_POST_mkdir(self): # return value?
# t=mkdir with name=: the new child directory must exist and be empty.
2175 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2176 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2177 d.addCallback(self.failUnlessNodeKeysAre, [])
2180 def test_POST_mkdir_initial_children(self):
# t=mkdir-with-children: POST a JSON children map; the new directory
# must contain exactly those children, with child-imm's RO cap intact.
2181 (newkids, caps) = self._create_initial_children()
2182 d = self.POST2(self.public_url +
2183 "/foo?t=mkdir-with-children&name=newdir",
2184 simplejson.dumps(newkids))
2185 d.addCallback(lambda res:
2186 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2187 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2188 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2189 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2190 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2193 def test_POST_mkdir_immutable(self):
# t=mkdir-immutable with an all-immutable children map: every child
# must be reachable afterwards with its original read-only cap.
2194 (newkids, caps) = self._create_immutable_children()
2195 d = self.POST2(self.public_url +
2196 "/foo?t=mkdir-immutable&name=newdir",
2197 simplejson.dumps(newkids))
2198 d.addCallback(lambda res:
2199 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2200 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2201 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2202 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2203 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2204 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2205 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2206 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2207 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2208 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2209 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2210 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2211 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
2214 def test_POST_mkdir_immutable_bad(self):
# t=mkdir-immutable must refuse a children map containing mutable or
# write-capable entries.
# NOTE(review): lines 2217 (expected status), 2219-2220 (the POST2
# callable and URL head), and 'return d' are elided here.
2215 (newkids, caps) = self._create_initial_children()
2216 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2218 "needed to be immutable but was not",
2221 "/foo?t=mkdir-immutable&name=newdir",
2222 simplejson.dumps(newkids))
2225 def test_POST_mkdir_2(self):
# t=mkdir in the URL path (directory named by the final path segment).
2226 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2227 d.addCallback(lambda res:
2228 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2229 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2230 d.addCallback(self.failUnlessNodeKeysAre, [])
2233 def test_POST_mkdirs_2(self):
# t=mkdir with a multi-segment path must create intermediate dirs too.
2234 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2235 d.addCallback(lambda res:
2236 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2237 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2238 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2239 d.addCallback(self.failUnlessNodeKeysAre, [])
2242 def test_POST_mkdir_no_parentdir_noredirect(self):
# POST /uri?t=mkdir (no parent): the body must be a parseable DIR2 URI.
2243 d = self.POST("/uri?t=mkdir")
2244 def _after_mkdir(res):
2245 uri.DirectoryURI.init_from_string(res)
2246 d.addCallback(_after_mkdir)
2249 def test_POST_mkdir_no_parentdir_noredirect2(self):
2250 # make sure form-based arguments (as on the welcome page) still work
2251 d = self.POST("/uri", t="mkdir")
2252 def _after_mkdir(res):
2253 uri.DirectoryURI.init_from_string(res)
2254 d.addCallback(_after_mkdir)
2255 d.addErrback(self.explain_web_error)
2258 def test_POST_mkdir_no_parentdir_redirect(self):
# redirect_to_result=true must 303 to a uri/URI:DIR2:... page.
2259 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2260 d.addBoth(self.shouldRedirect, None, statuscode='303')
2261 def _check_target(target):
2262 target = urllib.unquote(target)
2263 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2264 d.addCallback(_check_target)
2267 def test_POST_mkdir_no_parentdir_redirect2(self):
# Same redirect check with form-field arguments instead of query args.
2268 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2269 d.addBoth(self.shouldRedirect, None, statuscode='303')
2270 def _check_target(target):
2271 target = urllib.unquote(target)
2272 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2273 d.addCallback(_check_target)
2274 d.addErrback(self.explain_web_error)
2277 def _make_readonly(self, u):
# Return the read-only form of cap string 'u'.
# NOTE(review): lines 2279-2280 are elided (likely a 'ro_uri is None'
# guard); confirm against the full source.
2278 ro_uri = uri.from_string(u).get_readonly()
2281 return ro_uri.to_string()
2283 def _create_initial_children(self):
# Build a mixed (mutable + immutable + unknown-cap) children map in
# the JSON wire format, plus a dict of the caps used, for the
# mkdir-with-children tests.
# NOTE(review): lines 2307 (closing brace of the newkids literal) and
# 2314 (presumably 'dircap': dircap,) are elided from this listing.
2284 contents, n, filecap1 = self.makefile(12)
2285 md1 = {"metakey1": "metavalue1"}
2286 filecap2 = make_mutable_file_uri()
2287 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2288 filecap3 = node3.get_readonly_uri()
2289 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2290 dircap = DirectoryNode(node4, None, None).get_uri()
2291 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2292 emptydircap = "URI:DIR2-LIT:"
2293 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2294 "ro_uri": self._make_readonly(filecap1),
2295 "metadata": md1, }],
2296 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2297 "ro_uri": self._make_readonly(filecap2)}],
2298 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2299 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2300 "ro_uri": unknown_rocap}],
2301 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2302 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2303 u"dirchild": ["dirnode", {"rw_uri": dircap,
2304 "ro_uri": self._make_readonly(dircap)}],
2305 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2306 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2308 return newkids, {'filecap1': filecap1,
2309 'filecap2': filecap2,
2310 'filecap3': filecap3,
2311 'unknown_rwcap': unknown_rwcap,
2312 'unknown_rocap': unknown_rocap,
2313 'unknown_immcap': unknown_immcap,
2315 'litdircap': litdircap,
2316 'emptydircap': emptydircap}
2318 def _create_immutable_children(self):
# Build an all-immutable children map (CHK file, immutable dirnode,
# LIT dirs, unknown imm cap) plus the caps dict, for the
# mkdir-immutable tests.
# NOTE(review): line 2333 (closing brace of the newkids literal) is
# elided from this listing.
2319 contents, n, filecap1 = self.makefile(12)
2320 md1 = {"metakey1": "metavalue1"}
2321 tnode = create_chk_filenode("immutable directory contents\n"*10)
2322 dnode = DirectoryNode(tnode, None, None)
2323 assert not dnode.is_mutable()
2324 immdircap = dnode.get_uri()
2325 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2326 emptydircap = "URI:DIR2-LIT:"
2327 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2328 "metadata": md1, }],
2329 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2330 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2331 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2332 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2334 return newkids, {'filecap1': filecap1,
2335 'unknown_immcap': unknown_immcap,
2336 'immdircap': immdircap,
2337 'litdircap': litdircap,
2338 'emptydircap': emptydircap}
2340 def test_POST_mkdir_no_parentdir_initial_children(self):
# POST /uri?t=mkdir-with-children: the returned URI must name a new
# directory containing exactly the posted children with their caps.
# NOTE(review): several argument-continuation lines (2349, 2352, 2355,
# 2367-2368) and the tail (return d2 / return d) are elided here.
2341 (newkids, caps) = self._create_initial_children()
2342 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2343 def _after_mkdir(res):
2344 self.failUnless(res.startswith("URI:DIR"), res)
2345 n = self.s.create_node_from_uri(res)
2346 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2347 d2.addCallback(lambda ign:
2348 self.failUnlessROChildURIIs(n, u"child-imm",
2350 d2.addCallback(lambda ign:
2351 self.failUnlessRWChildURIIs(n, u"child-mutable",
2353 d2.addCallback(lambda ign:
2354 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2356 d2.addCallback(lambda ign:
2357 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2358 caps['unknown_rwcap']))
2359 d2.addCallback(lambda ign:
2360 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2361 caps['unknown_rocap']))
2362 d2.addCallback(lambda ign:
2363 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2364 caps['unknown_immcap']))
2365 d2.addCallback(lambda ign:
2366 self.failUnlessRWChildURIIs(n, u"dirchild",
2369 d.addCallback(_after_mkdir)
2372 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2373 # the regular /uri?t=mkdir operation is specified to ignore its body.
2374 # Only t=mkdir-with-children pays attention to it.
# NOTE(review): line 2377 (the expected HTTP status code argument) and
# the trailing 'return d' are elided from this listing.
2375 (newkids, caps) = self._create_initial_children()
2376 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2378 "t=mkdir does not accept children=, "
2379 "try t=mkdir-with-children instead",
2380 self.POST2, "/uri?t=mkdir", # without children
2381 simplejson.dumps(newkids))
2384 def test_POST_noparent_bad(self):
# POST /uri with an unrecognized t= must yield 400 Bad Request.
2385 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2386 "/uri accepts only PUT, PUT?t=mkdir, "
2387 "POST?t=upload, and POST?t=mkdir",
2388 self.POST, "/uri?t=bogus")
2391 def test_POST_mkdir_no_parentdir_immutable(self):
# POST /uri?t=mkdir-immutable: the returned URI must name an immutable
# directory containing exactly the posted read-only children.
# NOTE(review): argument-continuation lines (2400, 2406, 2409) and the
# tail (return d2 / return d, 2413-2416) are elided from this listing.
2392 (newkids, caps) = self._create_immutable_children()
2393 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2394 def _after_mkdir(res):
2395 self.failUnless(res.startswith("URI:DIR"), res)
2396 n = self.s.create_node_from_uri(res)
2397 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2398 d2.addCallback(lambda ign:
2399 self.failUnlessROChildURIIs(n, u"child-imm",
2401 d2.addCallback(lambda ign:
2402 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2403 caps['unknown_immcap']))
2404 d2.addCallback(lambda ign:
2405 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2407 d2.addCallback(lambda ign:
2408 self.failUnlessROChildURIIs(n, u"dirchild-lit",
2410 d2.addCallback(lambda ign:
2411 self.failUnlessROChildURIIs(n, u"dirchild-empty",
2412 caps['emptydircap']))
2414 d.addCallback(_after_mkdir)
2417 def test_POST_mkdir_no_parentdir_immutable_bad(self):
# /uri?t=mkdir-immutable must refuse a children map with mutable or
# write-capable entries.
# NOTE(review): lines 2421 (expected status) and 2423 (the POST2
# callable), plus 'return d', are elided from this listing.
2418 (newkids, caps) = self._create_initial_children()
2419 d = self.shouldFail2(error.Error,
2420 "test_POST_mkdir_no_parentdir_immutable_bad",
2422 "needed to be immutable but was not",
2424 "/uri?t=mkdir-immutable",
2425 simplejson.dumps(newkids))
2428 def test_welcome_page_mkdir_button(self):
2429 # Fetch the welcome page.
# Scrape the mkdir form out of the welcome page with a regex, replay
# it as a POST, and expect a 303 redirect.
# NOTE(review): lines 2430 (which evidently binds 'd' via a GET of the
# welcome page), 2437 (regex flags), 2440 (binding 'formt'), and the
# trailing 'return d' are elided from this listing.
2431 def _after_get_welcome_page(res):
2432 MKDIR_BUTTON_RE = re.compile(
2433 '<form action="([^"]*)" method="post".*?'
2434 '<input type="hidden" name="t" value="([^"]*)" />'
2435 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2436 '<input type="submit" value="Create a directory" />',
2438 mo = MKDIR_BUTTON_RE.search(res)
2439 formaction = mo.group(1)
2441 formaname = mo.group(3)
2442 formavalue = mo.group(4)
2443 return (formaction, formt, formaname, formavalue)
2444 d.addCallback(_after_get_welcome_page)
2445 def _after_parse_form(res):
2446 (formaction, formt, formaname, formavalue) = res
2447 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2448 d.addCallback(_after_parse_form)
2449 d.addBoth(self.shouldRedirect, None, statuscode='303')
2452 def test_POST_mkdir_replace(self): # return value?
# t=mkdir over an existing child replaces it with a fresh empty dir.
2453 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2454 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2455 d.addCallback(self.failUnlessNodeKeysAre, [])
2458 def test_POST_mkdir_no_replace_queryarg(self): # return value?
# t=mkdir + replace=false query arg over an existing child must fail
# and leave the original 'sub' (containing baz.txt) untouched.
# NOTE(review): line 2462 (expected HTTP status) is elided here.
2459 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2460 d.addBoth(self.shouldFail, error.Error,
2461 "POST_mkdir_no_replace_queryarg",
2463 "There was already a child by that name, and you asked me "
2464 "to not replace it")
2465 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2466 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2469 def test_POST_mkdir_no_replace_field(self): # return value?
# Same as the queryarg variant, but replace passed as a form field.
# NOTE(review): lines 2471 (the replace="false" kwarg) and 2473
# (expected HTTP status) are elided from this listing.
2470 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2472 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2474 "There was already a child by that name, and you asked me "
2475 "to not replace it")
2476 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2477 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
2480 def test_POST_mkdir_whendone_field(self):
# when_done form field: mkdir then redirect to /THERE; dir is created.
2481 d = self.POST(self.public_url + "/foo",
2482 t="mkdir", name="newdir", when_done="/THERE")
2483 d.addBoth(self.shouldRedirect, "/THERE")
2484 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2485 d.addCallback(self.failUnlessNodeKeysAre, [])
2488 def test_POST_mkdir_whendone_queryarg(self):
# when_done as a query arg behaves the same as the form field.
2489 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2490 t="mkdir", name="newdir")
2491 d.addBoth(self.shouldRedirect, "/THERE")
2492 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2493 d.addCallback(self.failUnlessNodeKeysAre, [])
2496 def test_POST_bad_t(self):
# POST to a directory with an unrecognized t= must yield 400.
2497 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2498 "POST to a directory with bad t=BOGUS",
2499 self.POST, self.public_url + "/foo", t="BOGUS")
2502 def test_POST_set_children(self, command_name="set_children"):
# Atomically add three children via a raw JSON POST body (exercised
# for both t=set_children and t=set-children via command_name).
# NOTE(review): this listing elides the opening of the reqbody
# triple-quoted JSON literal (2506-2507) and several of its interior
# lines, plus the 'def _then(res):' head (2534) and 'return d'.
2503 contents9, n9, newuri9 = self.makefile(9)
2504 contents10, n10, newuri10 = self.makefile(10)
2505 contents11, n11, newuri11 = self.makefile(11)
2508 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2511 "ctime": 1002777696.7564139,
2512 "mtime": 1002777696.7564139
2515 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2518 "ctime": 1002777696.7564139,
2519 "mtime": 1002777696.7564139
2522 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2525 "ctime": 1002777696.7564139,
2526 "mtime": 1002777696.7564139
2529 }""" % (newuri9, newuri10, newuri11)
2531 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2533 d = client.getPage(url, method="POST", postdata=reqbody)
2535 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2536 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2537 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2539 d.addCallback(_then)
2540 d.addErrback(self.dump_error)
def test_POST_set_children_with_hyphen(self):
    """Exercise the hyphenated alias: t=set-children behaves like t=set_children."""
    hyphenated = "set-children"
    return self.test_POST_set_children(command_name=hyphenated)
2546 def test_POST_link_uri(self):
# t=uri attaches an existing cap as a new child; contents must match.
# NOTE(review): lines 2552-2554 (the expected-contents argument and
# 'return d') are elided from this listing.
2547 contents, n, newuri = self.makefile(8)
2548 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2549 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2550 d.addCallback(lambda res:
2551 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2555 def test_POST_link_uri_replace(self):
# t=uri over an existing child name replaces it by default.
# NOTE(review): lines 2561-2563 (expected-contents argument and
# 'return d') are elided from this listing.
2556 contents, n, newuri = self.makefile(8)
2557 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2558 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2559 d.addCallback(lambda res:
2560 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2564 def test_POST_link_uri_unknown_bad(self):
# Linking an unknown future cap into the rw slot must be refused.
# NOTE(review): line 2568 (expected HTTP status) and 'return d' are
# elided from this listing.
2565 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
2566 d.addBoth(self.shouldFail, error.Error,
2567 "POST_link_uri_unknown_bad",
2569 "unknown cap in a write slot")
2572 def test_POST_link_uri_unknown_ro_good(self):
# An unknown cap with a ro. prefix is acceptable as a read-only child.
2573 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
2574 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
2577 def test_POST_link_uri_unknown_imm_good(self):
# An unknown cap with an imm. prefix is acceptable as an immutable child.
2578 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
2579 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
2582 def test_POST_link_uri_no_replace_queryarg(self):
# t=uri + replace=false query arg over an existing child must fail and
# leave the original bar.txt in place.
# NOTE(review): line 2588 (expected HTTP status) is elided here.
2583 contents, n, newuri = self.makefile(8)
2584 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2585 name="bar.txt", uri=newuri)
2586 d.addBoth(self.shouldFail, error.Error,
2587 "POST_link_uri_no_replace_queryarg",
2589 "There was already a child by that name, and you asked me "
2590 "to not replace it")
2591 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2592 d.addCallback(self.failUnlessIsBarDotTxt)
2595 def test_POST_link_uri_no_replace_field(self):
# Same as the queryarg variant, but replace="false" as a form field.
# NOTE(review): line 2601 (expected HTTP status) is elided here.
2596 contents, n, newuri = self.makefile(8)
2597 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2598 name="bar.txt", uri=newuri)
2599 d.addBoth(self.shouldFail, error.Error,
2600 "POST_link_uri_no_replace_field",
2602 "There was already a child by that name, and you asked me "
2603 "to not replace it")
2604 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2605 d.addCallback(self.failUnlessIsBarDotTxt)
2608 def test_POST_delete(self):
# t=delete with name=: the child must be gone from the listing.
2609 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2610 d.addCallback(lambda res: self._foo_node.list())
2611 def _check(children):
2612 self.failIf(u"bar.txt" in children)
2613 d.addCallback(_check)
2616 def test_POST_rename_file(self):
# t=rename moves bar.txt to wibble.txt: old name gone, new name
# serves the same contents and JSON.
2617 d = self.POST(self.public_url + "/foo", t="rename",
2618 from_name="bar.txt", to_name='wibble.txt')
2619 d.addCallback(lambda res:
2620 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2621 d.addCallback(lambda res:
2622 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2623 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2624 d.addCallback(self.failUnlessIsBarDotTxt)
2625 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2626 d.addCallback(self.failUnlessIsBarJSON)
2629 def test_POST_rename_file_redundant(self):
# Renaming a file onto its own name is a no-op that must not lose it.
2630 d = self.POST(self.public_url + "/foo", t="rename",
2631 from_name="bar.txt", to_name='bar.txt')
2632 d.addCallback(lambda res:
2633 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2634 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2635 d.addCallback(self.failUnlessIsBarDotTxt)
2636 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2637 d.addCallback(self.failUnlessIsBarJSON)
2640 def test_POST_rename_file_replace(self):
2641 # rename a file and replace a directory with it
2642 d = self.POST(self.public_url + "/foo", t="rename",
2643 from_name="bar.txt", to_name='empty')
2644 d.addCallback(lambda res:
2645 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2646 d.addCallback(lambda res:
2647 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2648 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2649 d.addCallback(self.failUnlessIsBarDotTxt)
2650 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2651 d.addCallback(self.failUnlessIsBarJSON)
2654 def test_POST_rename_file_no_replace_queryarg(self):
2655 # rename a file and replace a directory with it
# With replace=false the rename must fail; 'empty' stays a directory.
# NOTE(review): line 2660 (expected HTTP status) is elided here.
2656 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2657 from_name="bar.txt", to_name='empty')
2658 d.addBoth(self.shouldFail, error.Error,
2659 "POST_rename_file_no_replace_queryarg",
2661 "There was already a child by that name, and you asked me "
2662 "to not replace it")
2663 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2664 d.addCallback(self.failUnlessIsEmptyJSON)
2667 def test_POST_rename_file_no_replace_field(self):
2668 # rename a file and replace a directory with it
# Same as the queryarg variant, but replace="false" as a form field.
# NOTE(review): line 2673 (expected HTTP status) is elided here.
2669 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2670 from_name="bar.txt", to_name='empty')
2671 d.addBoth(self.shouldFail, error.Error,
2672 "POST_rename_file_no_replace_field",
2674 "There was already a child by that name, and you asked me "
2675 "to not replace it")
2676 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2677 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that 'res' is the t=json rendering of an empty directory."""
    data = simplejson.loads(res)
    # top-level shape is ["dirnode", {...}]
    self.failUnlessEqual(data[0], "dirnode", data)
    children = data[1]["children"]
    self.failUnlessReallyEqual(len(children), 0)
2685 def test_POST_rename_file_slash_fail(self):
# to_name= containing a slash must be rejected; bar.txt stays put.
# NOTE(review): lines 2690 (expected HTTP status), 2692 (call tail),
# and 'return d' are elided from this listing.
2686 d = self.POST(self.public_url + "/foo", t="rename",
2687 from_name="bar.txt", to_name='kirk/spock.txt')
2688 d.addBoth(self.shouldFail, error.Error,
2689 "test_POST_rename_file_slash_fail",
2691 "to_name= may not contain a slash",
2693 d.addCallback(lambda res:
2694 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2697 def test_POST_rename_dir(self):
# Renaming works for directories too: foo -> plunk, same JSON contents.
2698 d = self.POST(self.public_url, t="rename",
2699 from_name="foo", to_name='plunk')
2700 d.addCallback(lambda res:
2701 self.failIfNodeHasChild(self.public_root, u"foo"))
2702 d.addCallback(lambda res:
2703 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2704 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2705 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished with
    that HTTP status code."""
    if not isinstance(res, failure.Failure):
        to_where = (target is None) and "somewhere" or ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessReallyEqual(res.value.status, statuscode,
                                   "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        realtarget = self.webish_url + target
        self.failUnlessReallyEqual(res.value.location, realtarget,
                                   "%s: wrong target" % which)
    return res.value.location
def test_GET_URI_form(self):
    """GET /uri?uri=CAP redirects to /uri/CAP, preserving extra query args."""
    base = "/uri?uri=%s" % self._bar_txt_uri
    # this is supposed to give us a redirect to /uri/$URI, plus arguments
    targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
    d = self.GET(base)
    d.addBoth(self.shouldRedirect, targetbase)
    d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
    d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
    d.addCallback(lambda res: self.GET(base+"&t=json"))
    d.addBoth(self.shouldRedirect, targetbase+"?t=json")
    d.addCallback(self.log, "about to get file by uri")
    d.addCallback(lambda res: self.GET(base, followRedirect=True))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(self.log, "got file by uri, about to get dir by uri")
    d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
                                       followRedirect=True))
    d.addCallback(self.failUnlessIsFooJSON)
    d.addCallback(self.log, "got dir by uri")
    return d
def test_GET_URI_form_bad(self):
    """GET /uri without a uri= argument must 400."""
    d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                         "400 Bad Request", "GET /uri requires uri=",
                         self.GET, "/uri")
    return d
def test_GET_rename_form(self):
    """t=rename-form produces a form pre-filled with the child name."""
    d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
                 followRedirect=True)
    def _check(res):
        self.failUnless('name="when_done" value="."' in res, res)
        self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
    d.addCallback(_check)
    return d
def log(self, res, msg):
    """Log msg and pass res through unchanged (usable as a Deferred callback)."""
    #print "MSG: %s RES: %s" % (msg, res)
    log.msg(msg)
    return res
def test_GET_URI_URL(self):
    """GET /uri/CAP returns the file contents; filename/save args are cosmetic."""
    base = "/uri/%s" % self._bar_txt_uri
    d = self.GET(base)
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_GET_URI_URL_dir(self):
    """GET /uri/DIRCAP?t=json returns the directory's JSON rendering."""
    base = "/uri/%s?t=json" % self._foo_uri
    d = self.GET(base)
    d.addCallback(self.failUnlessIsFooJSON)
    return d
def test_GET_URI_URL_missing(self):
    """Fetching a cap with no recoverable shares must yield 410 Gone."""
    base = "/uri/%s" % self._bad_file_uri
    d = self.shouldHTTPError("test_GET_URI_URL_missing",
                             http.GONE, None, "NotEnoughSharesError",
                             self.GET, base)
    # TODO: how can we exercise both sides of WebDownloadTarget.fail
    # here? we must arrange for a download to fail after target.open()
    # has been called, and then inspect the response to see that it is
    # shorter than we expected.
    return d
def test_PUT_DIRURL_uri(self):
    """PUT ?t=uri over an existing directory replaces it with the given dircap."""
    d = self.s.create_dirnode()
    def _made_dir(dn):
        new_uri = dn.get_uri()
        # replace /foo with a new (empty) directory
        d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
        d.addCallback(lambda res:
                      self.failUnlessReallyEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessRWChildURIIs(self.public_root,
                                                  u"foo",
                                                  new_uri))
        return d
    d.addCallback(_made_dir)
    return d
def test_PUT_DIRURL_uri_noreplace(self):
    """PUT ?t=uri&replace=false over an existing child must 409 and leave /foo unchanged."""
    d = self.s.create_dirnode()
    def _made_dir(dn):
        new_uri = dn.get_uri()
        # replace /foo with a new (empty) directory, but ask that
        # replace=false, so it should fail
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
                             "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
                             self.PUT,
                             self.public_url + "/foo?t=uri&replace=false",
                             new_uri)
        d.addCallback(lambda res:
                      self.failUnlessRWChildURIIs(self.public_root,
                                                  u"foo",
                                                  self._foo_uri))
        return d
    d.addCallback(_made_dir)
    return d
def test_PUT_DIRURL_bad_t(self):
    """PUT to a directory with an unknown t= value must 400 and leave the child alone."""
    d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                         "400 Bad Request", "PUT to a directory",
                         self.PUT, self.public_url + "/foo?t=BOGUS", "")
    d.addCallback(lambda res:
                  self.failUnlessRWChildURIIs(self.public_root,
                                              u"foo",
                                              self._foo_uri))
    return d
def test_PUT_NEWFILEURL_uri(self):
    """PUT ?t=uri at a fresh name attaches the given filecap as a new child."""
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
    d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  contents))
    return d
def test_PUT_NEWFILEURL_uri_replace(self):
    """PUT ?t=uri over an existing child replaces it by default."""
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
    d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                  contents))
    return d
def test_PUT_NEWFILEURL_uri_no_replace(self):
    """PUT ?t=uri&replace=false over an existing child must 409."""
    contents, n, new_uri = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
    d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    return d
def test_PUT_NEWFILEURL_uri_unknown_bad(self):
    """An unrecognized writecap may not be placed in a write slot: 400."""
    d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
    d.addBoth(self.shouldFail, error.Error,
              "POST_put_uri_unknown_bad",
              "400 Bad Request",
              "unknown cap in a write slot")
    return d
def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
    """An unrecognized read-only cap is accepted and stored as a read-only child."""
    d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
                  u"put-future-ro.txt")
    return d
def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
    """An unrecognized immutable cap is accepted and stored as a read-only child."""
    d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
                  u"put-future-imm.txt")
    return d
def test_PUT_NEWFILE_URI(self):
    """PUT /uri uploads an immutable file and returns its cap in the body."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri", file_contents)
    def _check(uri):
        assert isinstance(uri, str), uri
        self.failUnless(uri in FakeCHKFileNode.all_contents)
        self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
                                   file_contents)
        return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    def _check2(res):
        self.failUnlessReallyEqual(res, file_contents)
    d.addCallback(_check2)
    return d
def test_PUT_NEWFILE_URI_not_mutable(self):
    """PUT /uri?mutable=false behaves exactly like the immutable default."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=false", file_contents)
    def _check(uri):
        assert isinstance(uri, str), uri
        self.failUnless(uri in FakeCHKFileNode.all_contents)
        self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
                                   file_contents)
        return self.GET("/uri/%s" % uri)
    d.addCallback(_check)
    def _check2(res):
        self.failUnlessReallyEqual(res, file_contents)
    d.addCallback(_check2)
    return d
def test_PUT_NEWFILE_URI_only_PUT(self):
    """/uri rejects unknown t= values with 400."""
    d = self.PUT("/uri?t=bogus", "")
    d.addBoth(self.shouldFail, error.Error,
              "PUT_NEWFILE_URI_only_PUT",
              "400 Bad Request",
              "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
    return d
def test_PUT_NEWFILE_URI_mutable(self):
    """PUT /uri?mutable=true creates a mutable (SSK) file and returns its writecap."""
    file_contents = "New file contents here\n"
    d = self.PUT("/uri?mutable=true", file_contents)
    def _check1(filecap):
        filecap = filecap.strip()
        self.failUnless(filecap.startswith("URI:SSK:"), filecap)
        self.filecap = filecap
        u = uri.WriteableSSKFileURI.init_from_string(filecap)
        self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
        n = self.s.create_node_from_uri(filecap)
        return n.download_best_version()
    d.addCallback(_check1)
    def _check2(data):
        self.failUnlessReallyEqual(data, file_contents)
        return self.GET("/uri/%s" % urllib.quote(self.filecap))
    d.addCallback(_check2)
    def _check3(res):
        self.failUnlessReallyEqual(res, file_contents)
    d.addCallback(_check3)
    return d
def test_PUT_mkdir(self):
    """PUT /uri?t=mkdir creates a fresh empty directory and returns its cap."""
    d = self.PUT("/uri?t=mkdir", "")
    def _check(uri):
        n = self.s.create_node_from_uri(uri.strip())
        d2 = self.failUnlessNodeKeysAre(n, [])
        d2.addCallback(lambda res:
                       self.GET("/uri/%s?t=json" % uri))
        return d2
    d.addCallback(_check)
    d.addCallback(self.failUnlessIsEmptyJSON)
    return d
def test_POST_check(self):
    """POST t=check on a child returns a (string) results page without error."""
    d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
    def _done(res):
        # this returns a string form of the results, which are probably
        # None since we're using fake filenodes.
        # TODO: verify that the check actually happened, by changing
        # FakeCHKFileNode to count how many times .check() has been
        # called.
        pass
    d.addCallback(_done)
    return d
def test_bad_method(self):
    """An unsupported HTTP method on a file URL must return 501."""
    url = self.webish_url + self.public_url + "/foo/bar.txt"
    d = self.shouldHTTPError("test_bad_method",
                             501, "Not Implemented",
                             "I don't know how to treat a BOGUS request.",
                             client.getPage, url, method="BOGUS")
    return d
def test_short_url(self):
    """DELETE on the bare /uri resource must return 501."""
    url = self.webish_url + "/uri"
    d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                             "I don't know how to treat a DELETE request.",
                             client.getPage, url, method="DELETE")
    return d
def test_ophandle_bad(self):
    """Polling a never-created ophandle must 404."""
    url = self.webish_url + "/operations/bogus?t=status"
    d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                             "unknown/expired handle 'bogus'",
                             client.getPage, url)
    return d
def test_ophandle_cancel(self):
    """t=cancel cancels the operation's monitor and forgets the handle."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/128?t=status&output=JSON"))
    def _check1(res):
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
        monitor = self.ws.root.child_operations.handles["128"][0]
        d = self.POST("/operations/128?t=cancel&output=JSON")
        def _check2(res):
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            # t=cancel causes the handle to be forgotten
            self.failUnless(monitor.is_cancelled())
        d.addCallback(_check2)
        return d
    d.addCallback(_check1)
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_cancel",
                                       404, "404 Not Found",
                                       "unknown/expired handle '128'",
                                       self.GET,
                                       "/operations/128?t=status&output=JSON"))
    return d
def test_ophandle_retainfor(self):
    """retain-for=0 on a status poll causes the handle to expire almost at once."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                  followRedirect=True)
    d.addCallback(lambda ignored:
                  self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
    def _check1(res):
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # the retain-for=0 will cause the handle to be expired very soon
    d.addCallback(lambda ign:
                  self.clock.advance(2.0))
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_retainfor",
                                       404, "404 Not Found",
                                       "unknown/expired handle '129'",
                                       self.GET,
                                       "/operations/129?t=status&output=JSON"))
    return d
def test_ophandle_release_after_complete(self):
    """release-after-complete=true expires the handle as soon as it is polled."""
    d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                  followRedirect=True)
    d.addCallback(self.wait_for_operation, "130")
    d.addCallback(lambda ignored:
                  self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
    # the release-after-complete=true will cause the handle to be expired
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("test_ophandle_release_after_complete",
                                       404, "404 Not Found",
                                       "unknown/expired handle '130'",
                                       self.GET,
                                       "/operations/130?t=status&output=JSON"))
    return d
def test_uncollected_ophandle_expiration(self):
    """Uncollected ophandles must survive just under 4 days, then expire."""
    # uncollected ophandles should expire after 4 days
    def _make_uncollected_ophandle(ophandle):
        d = self.POST(self.public_url +
                      "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                      followRedirect=False)
        # When we start the operation, the webapi server will want
        # to redirect us to the page for the ophandle, so we get
        # confirmation that the operation has started. If the
        # manifest operation has finished by the time we get there,
        # following that redirect (by setting followRedirect=True
        # above) has the side effect of collecting the ophandle that
        # we've just created, which means that we can't use the
        # ophandle to test the uncollected timeout anymore. So,
        # instead, catch the 302 here and don't follow it.
        d.addBoth(self.should302, "uncollected_ophandle_creation")
        return d
    # Create an ophandle, don't collect it, then advance the clock by
    # 4 days - 1 second and make sure that the ophandle is still there.
    d = _make_uncollected_ophandle(131)
    d.addCallback(lambda ign:
                  self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
    d.addCallback(lambda ign:
                  self.GET("/operations/131?t=status&output=JSON"))
    def _check1(res):
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # Create an ophandle, don't collect it, then try to collect it
    # after 4 days. It should be gone.
    d.addCallback(lambda ign:
                  _make_uncollected_ophandle(132))
    d.addCallback(lambda ign:
                  self.clock.advance(96*60*60))
    d.addCallback(lambda ign:
                  self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
                                       404, "404 Not Found",
                                       "unknown/expired handle '132'",
                                       self.GET,
                                       "/operations/132?t=status&output=JSON"))
    return d
def test_collected_ophandle_expiration(self):
    """Collected ophandles must survive just under 1 day, then expire."""
    # collected ophandles should expire after 1 day
    def _make_collected_ophandle(ophandle):
        d = self.POST(self.public_url +
                      "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                      followRedirect=True)
        # By following the initial redirect, we collect the ophandle
        # we've just created.
        return d
    # Create a collected ophandle, then collect it after 23 hours
    # and 59 seconds to make sure that it is still there.
    d = _make_collected_ophandle(133)
    d.addCallback(lambda ign:
                  self.clock.advance((24*60*60) - 1))
    d.addCallback(lambda ign:
                  self.GET("/operations/133?t=status&output=JSON"))
    def _check1(res):
        data = simplejson.loads(res)
        self.failUnless("finished" in data, res)
    d.addCallback(_check1)
    # Create another uncollected ophandle, then try to collect it
    # after 24 hours to make sure that it is gone.
    d.addCallback(lambda ign:
                  _make_collected_ophandle(134))
    d.addCallback(lambda ign:
                  self.clock.advance(24*60*60))
    d.addCallback(lambda ign:
                  self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
                                       404, "404 Not Found",
                                       "unknown/expired handle '134'",
                                       self.GET,
                                       "/operations/134?t=status&output=JSON"))
    return d
def test_incident(self):
    """POST /report_incident acknowledges the report."""
    d = self.POST("/report_incident", details="eek")
    def _done(res):
        self.failUnless("Thank you for your report!" in res, res)
    d.addCallback(_done)
    return d
def test_static(self):
    """Files placed under the configured staticdir are served beneath /static/."""
    webdir = os.path.join(self.staticdir, "subdir")
    fileutil.make_dirs(webdir)
    f = open(os.path.join(webdir, "hello.txt"), "wb")
    f.write("hello")
    f.close()

    d = self.GET("/static/subdir/hello.txt")
    def _check(res):
        self.failUnlessReallyEqual(res, "hello")
    d.addCallback(_check)
    return d
class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    """Unit tests for the small helper functions in allmydata.web.common/status."""

    def test_load_file(self):
        # This will raise an exception unless a well-formed XML file is found under that name.
        common.getxmlfile('directory.xhtml').load()

    def test_parse_replace_arg(self):
        self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
        self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
        self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
                                   "only-files")
        # anything else is rejected
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessReallyEqual(common.abbreviate_time(None), "")
        self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
        self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")

    def test_abbreviate_rate(self):
        self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
        self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessReallyEqual(common.abbreviate_size(None), "")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        def convert(s):
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessReallyEqual(convert(0), "0 seconds")
        self.failUnlessReallyEqual(convert(1), "1 second")
        self.failUnlessReallyEqual(convert(2), "2 seconds")
        def convert2(s):
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessReallyEqual(convert2([]), "has shares: ")
        self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
        self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
3198 class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
def CHECK(self, ign, which, args, clientnum=0):
    """POST the stashed file URL for `which` with the given query-arg string."""
    fileurl = self.fileurls[which]
    url = fileurl + "?" + args
    return self.GET(url, method="POST", clientnum=clientnum)
def test_filecheck(self):
    """Exercise t=check (html and json) on healthy, literal, sick, dead and corrupt files."""
    self.basedir = "web/Grid/filecheck"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    DATA = "data" * 100
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data("literal", convergence="")))
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
    d.addCallback(_stash_mutable_uri, "smalldir")

    def _compute_fileurls(ignored):
        self.fileurls = {}
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_uri_shares(self.uris["good"])
        self.failUnlessReallyEqual(len(good_shares), 10)
        sick_shares = self.find_uri_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_uri_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_uri_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        corrupt_share(cso)
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)
    d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
    def _got_html_good_return_to(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
                        in res, res)
    d.addCallback(_got_html_good_return_to)
    d.addCallback(self.CHECK, "good", "t=check&output=json")
    def _got_json_good(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"], "Healthy")
        self.failUnless(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_good)

    d.addCallback(self.CHECK, "small", "t=check")
    def _got_html_small(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_small)
    d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
    def _got_html_small_return_to(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless('<a href="somewhere">Return to file'
                        in res, res)
    d.addCallback(_got_html_small_return_to)
    d.addCallback(self.CHECK, "small", "t=check&output=json")
    def _got_json_small(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_small)

    d.addCallback(self.CHECK, "smalldir", "t=check")
    def _got_html_smalldir(res):
        self.failUnless("Literal files are always healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_smalldir)
    d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
    def _got_json_smalldir(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["storage-index"], "")
        self.failUnless(r["results"]["healthy"])
    d.addCallback(_got_json_smalldir)

    d.addCallback(self.CHECK, "sick", "t=check")
    def _got_html_sick(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_sick)
    d.addCallback(self.CHECK, "sick", "t=check&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failUnless(r["results"]["recoverable"])
    d.addCallback(_got_json_sick)

    d.addCallback(self.CHECK, "dead", "t=check")
    def _got_html_dead(res):
        self.failUnless("Not Healthy" in res, res)
    d.addCallback(_got_html_dead)
    d.addCallback(self.CHECK, "dead", "t=check&output=json")
    def _got_json_dead(res):
        r = simplejson.loads(res)
        self.failUnlessEqual(r["summary"],
                             "Not Healthy: 1 shares (enc 3-of-10)")
        self.failIf(r["results"]["healthy"])
        self.failIf(r["results"]["needs-rebalancing"])
        self.failIf(r["results"]["recoverable"])
    d.addCallback(_got_json_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
    def _got_html_corrupt(res):
        self.failUnless("Not Healthy! : Unhealthy" in res, res)
    d.addCallback(_got_html_corrupt)
    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
    def _got_json_corrupt(res):
        r = simplejson.loads(res)
        self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
                        r["summary"])
        self.failIf(r["results"]["healthy"])
        self.failUnless(r["results"]["recoverable"])
        self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
        self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
    d.addCallback(_got_json_corrupt)

    d.addErrback(self.explain_web_error)
    return d
def test_repair_html(self):
    """Exercise t=check&repair=true (html output) on healthy, sick and corrupt files."""
    self.basedir = "web/Grid/repair_html"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    DATA = "data" * 100
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "sick")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"2", convergence="")))
    d.addCallback(_stash_uri, "dead")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
    d.addCallback(_stash_mutable_uri, "corrupt")

    def _compute_fileurls(ignored):
        self.fileurls = {}
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        good_shares = self.find_uri_shares(self.uris["good"])
        self.failUnlessReallyEqual(len(good_shares), 10)
        sick_shares = self.find_uri_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        dead_shares = self.find_uri_shares(self.uris["dead"])
        for i in range(1, 10):
            os.unlink(dead_shares[i][2])
        c_shares = self.find_uri_shares(self.uris["corrupt"])
        cso = CorruptShareOptions()
        cso.stdout = StringIO()
        cso.parseOptions([c_shares[0][2]])
        corrupt_share(cso)
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "good", "t=check&repair=true")
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("No repair necessary" in res, res)
    d.addCallback(_got_html_good)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true")
    def _got_html_sick(res):
        self.failUnless("Healthy : healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_sick)

    # repair of a dead file will fail, of course, but it isn't yet
    # clear how this should be reported. Right now it shows up as
    # a "410 Gone".
    #
    #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
    #def _got_html_dead(res):
    #    print res
    #    self.failUnless("Healthy : healthy" in res, res)
    #    self.failIf("Not Healthy" in res, res)
    #    self.failUnless("No repair necessary" in res, res)
    #d.addCallback(_got_html_dead)

    d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
    def _got_html_corrupt(res):
        self.failUnless("Healthy : Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
        self.failUnless("Repair successful" in res, res)
    d.addCallback(_got_html_corrupt)

    d.addErrback(self.explain_web_error)
    return d
def test_repair_json(self):
    """Exercise t=check&repair=true&output=json on a sick file."""
    self.basedir = "web/Grid/repair_json"
    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    DATA = "data" * 100
    d = c0.upload(upload.Data(DATA+"1", convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "sick")

    def _compute_fileurls(ignored):
        self.fileurls = {}
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    def _clobber_shares(ignored):
        sick_shares = self.find_uri_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
    def _got_json_sick(res):
        r = simplejson.loads(res)
        self.failUnlessReallyEqual(r["repair-attempted"], True)
        self.failUnlessReallyEqual(r["repair-successful"], True)
        self.failUnlessEqual(r["pre-repair-results"]["summary"],
                             "Not Healthy: 9 shares (enc 3-of-10)")
        self.failIf(r["pre-repair-results"]["results"]["healthy"])
        self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
        self.failUnless(r["post-repair-results"]["results"]["healthy"])
    d.addCallback(_got_json_sick)

    d.addErrback(self.explain_web_error)
    return d
def test_unknown(self, immutable=False):
    """Directory listings, t=json and t=info must all tolerate unknown-cap children."""
    self.basedir = "web/Grid/unknown"
    if immutable:
        self.basedir = "web/Grid/unknown-immutable"

    self.set_up_grid()
    c0 = self.g.clients[0]
    self.uris = {}
    self.fileurls = {}

    # the future cap format may contain slashes, which must be tolerated
    expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
                                                       safe="")

    if immutable:
        name = u"future-imm"
        future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
        d = c0.create_immutable_dirnode({name: (future_node, {})})
    else:
        name = u"future"
        future_node = UnknownNode(unknown_rwcap, unknown_rocap)
        d = c0.create_dirnode()

    def _stash_root_and_create_file(n):
        self.rootnode = n
        self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
        if not immutable:
            return self.rootnode.set_node(name, future_node)
    d.addCallback(_stash_root_and_create_file)

    # make sure directory listing tolerates unknown nodes
    d.addCallback(lambda ign: self.GET(self.rooturl))
    def _check_directory_html(res, expected_type_suffix):
        pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
                              '<td>%s</td>' % (expected_type_suffix, str(name)),
                             re.DOTALL)
        self.failUnless(re.search(pattern, res), res)
        # find the More Info link for name, should be relative
        mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
        info_url = mo.group(1)
        self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
    if immutable:
        d.addCallback(_check_directory_html, "-IMM")
    else:
        d.addCallback(_check_directory_html, "")

    d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
    def _check_directory_json(res, expect_rw_uri):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "dirnode")
        f = data[1]["children"][name]
        self.failUnlessEqual(f[0], "unknown")
        if expect_rw_uri:
            self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
        else:
            self.failIfIn("rw_uri", f[1])
        if immutable:
            self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
        else:
            self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
        self.failUnless("metadata" in f[1])
    d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

    def _check_info(res, expect_rw_uri, expect_ro_uri):
        self.failUnlessIn("Object Type: <span>unknown</span>", res)
        if expect_rw_uri:
            self.failUnlessIn(unknown_rwcap, res)
        if expect_ro_uri:
            if immutable:
                self.failUnlessIn(unknown_immcap, res)
            else:
                self.failUnlessIn(unknown_rocap, res)
        else:
            self.failIfIn(unknown_rocap, res)
        self.failIfIn("Raw data as", res)
        self.failIfIn("Directory writecap", res)
        self.failIfIn("Checker Operations", res)
        self.failIfIn("Mutable File Operations", res)
        self.failIfIn("Directory Operations", res)

    # FIXME: these should have expect_rw_uri=not immutable; I don't know
    # why they fail. Possibly related to ticket #922.

    d.addCallback(lambda ign: self.GET(expected_info_url))
    d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
    d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
    d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)

    def _check_json(res, expect_rw_uri):
        data = simplejson.loads(res)
        self.failUnlessEqual(data[0], "unknown")
        if expect_rw_uri:
            self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
        else:
            self.failIfIn("rw_uri", data[1])

        if immutable:
            self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
            self.failUnlessReallyEqual(data[1]["mutable"], False)
        elif expect_rw_uri:
            self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
            self.failUnlessReallyEqual(data[1]["mutable"], True)
        else:
            self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
            self.failIf("mutable" in data[1], data[1])

        # TODO: check metadata contents
        self.failUnless("metadata" in data[1])

    d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
    d.addCallback(_check_json, expect_rw_uri=not immutable)

    # and make sure that a read-only version of the directory can be
    # rendered too. This version will not have unknown_rwcap, whether
    # or not future_node was immutable.
    d.addCallback(lambda ign: self.GET(self.rourl))
    if immutable:
        d.addCallback(_check_directory_html, "-IMM")
    else:
        d.addCallback(_check_directory_html, "-RO")

    d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
    d.addCallback(_check_directory_json, expect_rw_uri=False)

    d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
    d.addCallback(_check_json, expect_rw_uri=False)

    # TODO: check that getting t=info from the Info link in the ro directory
    # works, and does not include the writecap URI.
    return d
3599 def test_immutable_unknown(self):
3600 return self.test_unknown(immutable=True)
3602 def test_mutant_dirnodes_are_omitted(self):
3603 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
3606 c = self.g.clients[0]
3611 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
3612 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
3613 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
3615 # This method tests mainly dirnode, but we'd have to duplicate code in order to
3616 # test the dirnode and web layers separately.
3618 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
3619 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
3620 # When the directory is read, the mutants should be silently disposed of, leaving
3621 # their lonely sibling.
3622 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
3623 # because immutable directories don't have a writecap and therefore that field
3624 # isn't (and can't be) decrypted.
3625 # TODO: The field still exists in the netstring. Technically we should check what
3626 # happens if something is put there (_unpack_contents should raise ValueError),
3627 # but that can wait.
3629 lonely_child = nm.create_from_cap(lonely_uri)
3630 mutant_ro_child = nm.create_from_cap(mut_read_uri)
3631 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
3633 def _by_hook_or_by_crook():
3635 for n in [mutant_ro_child, mutant_write_in_ro_child]:
3636 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
3638 mutant_write_in_ro_child.get_write_uri = lambda: None
3639 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
3641 kids = {u"lonely": (lonely_child, {}),
3642 u"ro": (mutant_ro_child, {}),
3643 u"write-in-ro": (mutant_write_in_ro_child, {}),
3645 d = c.create_immutable_dirnode(kids)
3648 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
3649 self.failIf(dn.is_mutable())
3650 self.failUnless(dn.is_readonly())
3651 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
3652 self.failIf(hasattr(dn._node, 'get_writekey'))
3654 self.failUnless("RO-IMM" in rep)
3656 self.failUnlessIn("CHK", cap.to_string())
3659 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
3660 return download_to_data(dn._node)
3661 d.addCallback(_created)
3663 def _check_data(data):
3664 # Decode the netstring representation of the directory to check that all children
3665 # are present. This is a bit of an abstraction violation, but there's not really
3666 # any other way to do it given that the real DirectoryNode._unpack_contents would
3667 # strip the mutant children out (which is what we're trying to test, later).
3670 while position < len(data):
3671 entries, position = split_netstring(data, 1, position)
3673 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
3674 name = name_utf8.decode("utf-8")
3675 self.failUnless(rwcapdata == "")
3676 self.failUnless(name in kids)
3677 (expected_child, ign) = kids[name]
3678 self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
3681 self.failUnlessReallyEqual(numkids, 3)
3682 return self.rootnode.list()
3683 d.addCallback(_check_data)
3685 # Now when we use the real directory listing code, the mutants should be absent.
3686 def _check_kids(children):
3687 self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
3688 lonely_node, lonely_metadata = children[u"lonely"]
3690 self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
3691 self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
3692 d.addCallback(_check_kids)
3694 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
3695 d.addCallback(lambda n: n.list())
3696 d.addCallback(_check_kids) # again with dirnode recreated from cap
3698 # Make sure the lonely child can be listed in HTML...
3699 d.addCallback(lambda ign: self.GET(self.rooturl))
3700 def _check_html(res):
3701 self.failIfIn("URI:SSK", res)
3702 get_lonely = "".join([r'<td>FILE</td>',
3704 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
3706 r'\s+<td>%d</td>' % len("one"),
3708 self.failUnless(re.search(get_lonely, res), res)
3710 # find the More Info link for name, should be relative
3711 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3712 info_url = mo.group(1)
3713 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
3714 d.addCallback(_check_html)
3717 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3718 def _check_json(res):
3719 data = simplejson.loads(res)
3720 self.failUnlessEqual(data[0], "dirnode")
3721 listed_children = data[1]["children"]
3722 self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
3723 ll_type, ll_data = listed_children[u"lonely"]
3724 self.failUnlessEqual(ll_type, "filenode")
3725 self.failIf("rw_uri" in ll_data)
3726 self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
3727 d.addCallback(_check_json)
3730 def test_deep_check(self):
3731 self.basedir = "web/Grid/deep_check"
3733 c0 = self.g.clients[0]
3737 d = c0.create_dirnode()
3738 def _stash_root_and_create_file(n):
3740 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3741 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3742 d.addCallback(_stash_root_and_create_file)
3743 def _stash_uri(fn, which):
3744 self.uris[which] = fn.get_uri()
3746 d.addCallback(_stash_uri, "good")
3747 d.addCallback(lambda ign:
3748 self.rootnode.add_file(u"small",
3749 upload.Data("literal",
3751 d.addCallback(_stash_uri, "small")
3752 d.addCallback(lambda ign:
3753 self.rootnode.add_file(u"sick",
3754 upload.Data(DATA+"1",
3756 d.addCallback(_stash_uri, "sick")
3758 # this tests that deep-check and stream-manifest will ignore
3759 # UnknownNode instances. Hopefully this will also cover deep-stats.
3760 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3761 d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
3763 def _clobber_shares(ignored):
3764 self.delete_shares_numbered(self.uris["sick"], [0,1])
3765 d.addCallback(_clobber_shares)
3773 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3776 units = [simplejson.loads(line)
3777 for line in res.splitlines()
3780 print "response is:", res
3781 print "undecodeable line was '%s'" % line
3783 self.failUnlessReallyEqual(len(units), 5+1)
3784 # should be parent-first
3786 self.failUnlessEqual(u0["path"], [])
3787 self.failUnlessEqual(u0["type"], "directory")
3788 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
3789 u0cr = u0["check-results"]
3790 self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
3792 ugood = [u for u in units
3793 if u["type"] == "file" and u["path"] == [u"good"]][0]
3794 self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
3795 ugoodcr = ugood["check-results"]
3796 self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
3799 self.failUnlessEqual(stats["type"], "stats")
3801 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
3802 self.failUnlessReallyEqual(s["count-literal-files"], 1)
3803 self.failUnlessReallyEqual(s["count-directories"], 1)
3804 self.failUnlessReallyEqual(s["count-unknown"], 1)
3805 d.addCallback(_done)
3807 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3808 def _check_manifest(res):
3809 self.failUnless(res.endswith("\n"))
3810 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3811 self.failUnlessReallyEqual(len(units), 5+1)
3812 self.failUnlessEqual(units[-1]["type"], "stats")
3814 self.failUnlessEqual(first["path"], [])
3815 self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
3816 self.failUnlessEqual(first["type"], "directory")
3817 stats = units[-1]["stats"]
3818 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
3819 self.failUnlessReallyEqual(stats["count-literal-files"], 1)
3820 self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
3821 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
3822 self.failUnlessReallyEqual(stats["count-unknown"], 1)
3823 d.addCallback(_check_manifest)
3825 # now add root/subdir and root/subdir/grandchild, then make subdir
3826 # unrecoverable, then see what happens
3828 d.addCallback(lambda ign:
3829 self.rootnode.create_subdirectory(u"subdir"))
3830 d.addCallback(_stash_uri, "subdir")
3831 d.addCallback(lambda subdir_node:
3832 subdir_node.add_file(u"grandchild",
3833 upload.Data(DATA+"2",
3835 d.addCallback(_stash_uri, "grandchild")
3837 d.addCallback(lambda ign:
3838 self.delete_shares_numbered(self.uris["subdir"],
3846 # root/subdir [unrecoverable]
3847 # root/subdir/grandchild
3849 # how should a streaming-JSON API indicate fatal error?
3850 # answer: emit ERROR: instead of a JSON string
3852 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3853 def _check_broken_manifest(res):
3854 lines = res.splitlines()
3856 for (i,line) in enumerate(lines)
3857 if line.startswith("ERROR:")]
3859 self.fail("no ERROR: in output: %s" % (res,))
3860 first_error = error_lines[0]
3861 error_line = lines[first_error]
3862 error_msg = lines[first_error+1:]
3863 error_msg_s = "\n".join(error_msg) + "\n"
3864 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3866 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3867 units = [simplejson.loads(line) for line in lines[:first_error]]
3868 self.failUnlessReallyEqual(len(units), 6) # includes subdir
3869 last_unit = units[-1]
3870 self.failUnlessEqual(last_unit["path"], ["subdir"])
3871 d.addCallback(_check_broken_manifest)
3873 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3874 def _check_broken_deepcheck(res):
3875 lines = res.splitlines()
3877 for (i,line) in enumerate(lines)
3878 if line.startswith("ERROR:")]
3880 self.fail("no ERROR: in output: %s" % (res,))
3881 first_error = error_lines[0]
3882 error_line = lines[first_error]
3883 error_msg = lines[first_error+1:]
3884 error_msg_s = "\n".join(error_msg) + "\n"
3885 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3887 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3888 units = [simplejson.loads(line) for line in lines[:first_error]]
3889 self.failUnlessReallyEqual(len(units), 6) # includes subdir
3890 last_unit = units[-1]
3891 self.failUnlessEqual(last_unit["path"], ["subdir"])
3892 r = last_unit["check-results"]["results"]
3893 self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
3894 self.failUnlessReallyEqual(r["count-shares-good"], 1)
3895 self.failUnlessReallyEqual(r["recoverable"], False)
3896 d.addCallback(_check_broken_deepcheck)
3898 d.addErrback(self.explain_web_error)
3901 def test_deep_check_and_repair(self):
3902 self.basedir = "web/Grid/deep_check_and_repair"
3904 c0 = self.g.clients[0]
3908 d = c0.create_dirnode()
3909 def _stash_root_and_create_file(n):
3911 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3912 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3913 d.addCallback(_stash_root_and_create_file)
3914 def _stash_uri(fn, which):
3915 self.uris[which] = fn.get_uri()
3916 d.addCallback(_stash_uri, "good")
3917 d.addCallback(lambda ign:
3918 self.rootnode.add_file(u"small",
3919 upload.Data("literal",
3921 d.addCallback(_stash_uri, "small")
3922 d.addCallback(lambda ign:
3923 self.rootnode.add_file(u"sick",
3924 upload.Data(DATA+"1",
3926 d.addCallback(_stash_uri, "sick")
3927 #d.addCallback(lambda ign:
3928 # self.rootnode.add_file(u"dead",
3929 # upload.Data(DATA+"2",
3931 #d.addCallback(_stash_uri, "dead")
3933 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3934 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3935 #d.addCallback(_stash_uri, "corrupt")
3937 def _clobber_shares(ignored):
3938 good_shares = self.find_uri_shares(self.uris["good"])
3939 self.failUnlessReallyEqual(len(good_shares), 10)
3940 sick_shares = self.find_uri_shares(self.uris["sick"])
3941 os.unlink(sick_shares[0][2])
3942 #dead_shares = self.find_uri_shares(self.uris["dead"])
3943 #for i in range(1, 10):
3944 # os.unlink(dead_shares[i][2])
3946 #c_shares = self.find_uri_shares(self.uris["corrupt"])
3947 #cso = CorruptShareOptions()
3948 #cso.stdout = StringIO()
3949 #cso.parseOptions([c_shares[0][2]])
3951 d.addCallback(_clobber_shares)
3954 # root/good CHK, 10 shares
3956 # root/sick CHK, 9 shares
3958 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
3960 units = [simplejson.loads(line)
3961 for line in res.splitlines()
3963 self.failUnlessReallyEqual(len(units), 4+1)
3964 # should be parent-first
3966 self.failUnlessEqual(u0["path"], [])
3967 self.failUnlessEqual(u0["type"], "directory")
3968 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
3969 u0crr = u0["check-and-repair-results"]
3970 self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
3971 self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3973 ugood = [u for u in units
3974 if u["type"] == "file" and u["path"] == [u"good"]][0]
3975 self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
3976 ugoodcrr = ugood["check-and-repair-results"]
3977 self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
3978 self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
3980 usick = [u for u in units
3981 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3982 self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
3983 usickcrr = usick["check-and-repair-results"]
3984 self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
3985 self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
3986 self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3987 self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
3990 self.failUnlessEqual(stats["type"], "stats")
3992 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
3993 self.failUnlessReallyEqual(s["count-literal-files"], 1)
3994 self.failUnlessReallyEqual(s["count-directories"], 1)
3995 d.addCallback(_done)
3997 d.addErrback(self.explain_web_error)
4000 def _count_leases(self, ignored, which):
4001 u = self.uris[which]
4002 shares = self.find_uri_shares(u)
4004 for shnum, serverid, fn in shares:
4005 sf = get_share_file(fn)
4006 num_leases = len(list(sf.get_leases()))
4007 lease_counts.append( (fn, num_leases) )
4010 def _assert_leasecount(self, lease_counts, expected):
4011 for (fn, num_leases) in lease_counts:
4012 if num_leases != expected:
4013 self.fail("expected %d leases, have %d, on %s" %
4014 (expected, num_leases, fn))
4016 def test_add_lease(self):
4017 self.basedir = "web/Grid/add_lease"
4018 self.set_up_grid(num_clients=2)
4019 c0 = self.g.clients[0]
4022 d = c0.upload(upload.Data(DATA, convergence=""))
4023 def _stash_uri(ur, which):
4024 self.uris[which] = ur.uri
4025 d.addCallback(_stash_uri, "one")
4026 d.addCallback(lambda ign:
4027 c0.upload(upload.Data(DATA+"1", convergence="")))
4028 d.addCallback(_stash_uri, "two")
4029 def _stash_mutable_uri(n, which):
4030 self.uris[which] = n.get_uri()
4031 assert isinstance(self.uris[which], str)
4032 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
4033 d.addCallback(_stash_mutable_uri, "mutable")
4035 def _compute_fileurls(ignored):
4037 for which in self.uris:
4038 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4039 d.addCallback(_compute_fileurls)
4041 d.addCallback(self._count_leases, "one")
4042 d.addCallback(self._assert_leasecount, 1)
4043 d.addCallback(self._count_leases, "two")
4044 d.addCallback(self._assert_leasecount, 1)
4045 d.addCallback(self._count_leases, "mutable")
4046 d.addCallback(self._assert_leasecount, 1)
4048 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
4049 def _got_html_good(res):
4050 self.failUnless("Healthy" in res, res)
4051 self.failIf("Not Healthy" in res, res)
4052 d.addCallback(_got_html_good)
4054 d.addCallback(self._count_leases, "one")
4055 d.addCallback(self._assert_leasecount, 1)
4056 d.addCallback(self._count_leases, "two")
4057 d.addCallback(self._assert_leasecount, 1)
4058 d.addCallback(self._count_leases, "mutable")
4059 d.addCallback(self._assert_leasecount, 1)
4061 # this CHECK uses the original client, which uses the same
4062 # lease-secrets, so it will just renew the original lease
4063 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
4064 d.addCallback(_got_html_good)
4066 d.addCallback(self._count_leases, "one")
4067 d.addCallback(self._assert_leasecount, 1)
4068 d.addCallback(self._count_leases, "two")
4069 d.addCallback(self._assert_leasecount, 1)
4070 d.addCallback(self._count_leases, "mutable")
4071 d.addCallback(self._assert_leasecount, 1)
4073 # this CHECK uses an alternate client, which adds a second lease
4074 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
4075 d.addCallback(_got_html_good)
4077 d.addCallback(self._count_leases, "one")
4078 d.addCallback(self._assert_leasecount, 2)
4079 d.addCallback(self._count_leases, "two")
4080 d.addCallback(self._assert_leasecount, 1)
4081 d.addCallback(self._count_leases, "mutable")
4082 d.addCallback(self._assert_leasecount, 1)
4084 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
4085 d.addCallback(_got_html_good)
4087 d.addCallback(self._count_leases, "one")
4088 d.addCallback(self._assert_leasecount, 2)
4089 d.addCallback(self._count_leases, "two")
4090 d.addCallback(self._assert_leasecount, 1)
4091 d.addCallback(self._count_leases, "mutable")
4092 d.addCallback(self._assert_leasecount, 1)
4094 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
4096 d.addCallback(_got_html_good)
4098 d.addCallback(self._count_leases, "one")
4099 d.addCallback(self._assert_leasecount, 2)
4100 d.addCallback(self._count_leases, "two")
4101 d.addCallback(self._assert_leasecount, 1)
4102 d.addCallback(self._count_leases, "mutable")
4103 d.addCallback(self._assert_leasecount, 2)
4105 d.addErrback(self.explain_web_error)
4108 def test_deep_add_lease(self):
4109 self.basedir = "web/Grid/deep_add_lease"
4110 self.set_up_grid(num_clients=2)
4111 c0 = self.g.clients[0]
4115 d = c0.create_dirnode()
4116 def _stash_root_and_create_file(n):
4118 self.uris["root"] = n.get_uri()
4119 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4120 return n.add_file(u"one", upload.Data(DATA, convergence=""))
4121 d.addCallback(_stash_root_and_create_file)
4122 def _stash_uri(fn, which):
4123 self.uris[which] = fn.get_uri()
4124 d.addCallback(_stash_uri, "one")
4125 d.addCallback(lambda ign:
4126 self.rootnode.add_file(u"small",
4127 upload.Data("literal",
4129 d.addCallback(_stash_uri, "small")
4131 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
4132 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
4133 d.addCallback(_stash_uri, "mutable")
4135 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
4137 units = [simplejson.loads(line)
4138 for line in res.splitlines()
4140 # root, one, small, mutable, stats
4141 self.failUnlessReallyEqual(len(units), 4+1)
4142 d.addCallback(_done)
4144 d.addCallback(self._count_leases, "root")
4145 d.addCallback(self._assert_leasecount, 1)
4146 d.addCallback(self._count_leases, "one")
4147 d.addCallback(self._assert_leasecount, 1)
4148 d.addCallback(self._count_leases, "mutable")
4149 d.addCallback(self._assert_leasecount, 1)
4151 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
4152 d.addCallback(_done)
4154 d.addCallback(self._count_leases, "root")
4155 d.addCallback(self._assert_leasecount, 1)
4156 d.addCallback(self._count_leases, "one")
4157 d.addCallback(self._assert_leasecount, 1)
4158 d.addCallback(self._count_leases, "mutable")
4159 d.addCallback(self._assert_leasecount, 1)
4161 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
4163 d.addCallback(_done)
4165 d.addCallback(self._count_leases, "root")
4166 d.addCallback(self._assert_leasecount, 2)
4167 d.addCallback(self._count_leases, "one")
4168 d.addCallback(self._assert_leasecount, 2)
4169 d.addCallback(self._count_leases, "mutable")
4170 d.addCallback(self._assert_leasecount, 2)
4172 d.addErrback(self.explain_web_error)
4176 def test_exceptions(self):
4177 self.basedir = "web/Grid/exceptions"
4178 self.set_up_grid(num_clients=1, num_servers=2)
4179 c0 = self.g.clients[0]
4180 c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
4183 d = c0.create_dirnode()
4185 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4186 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
4188 d.addCallback(_stash_root)
4189 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
4191 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
4192 self.delete_shares_numbered(ur.uri, range(1,10))
4194 u = uri.from_string(ur.uri)
4195 u.key = testutil.flip_bit(u.key, 0)
4196 baduri = u.to_string()
4197 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
4198 d.addCallback(_stash_bad)
4199 d.addCallback(lambda ign: c0.create_dirnode())
4200 def _mangle_dirnode_1share(n):
4202 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
4203 self.fileurls["dir-1share-json"] = url + "?t=json"
4204 self.delete_shares_numbered(u, range(1,10))
4205 d.addCallback(_mangle_dirnode_1share)
4206 d.addCallback(lambda ign: c0.create_dirnode())
4207 def _mangle_dirnode_0share(n):
4209 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
4210 self.fileurls["dir-0share-json"] = url + "?t=json"
4211 self.delete_shares_numbered(u, range(0,10))
4212 d.addCallback(_mangle_dirnode_0share)
4214 # NotEnoughSharesError should be reported sensibly, with a
4215 # text/plain explanation of the problem, and perhaps some
4216 # information on which shares *could* be found.
4218 d.addCallback(lambda ignored:
4219 self.shouldHTTPError("GET unrecoverable",
4220 410, "Gone", "NoSharesError",
4221 self.GET, self.fileurls["0shares"]))
4222 def _check_zero_shares(body):
4223 self.failIf("<html>" in body, body)
4224 body = " ".join(body.strip().split())
4225 exp = ("NoSharesError: no shares could be found. "
4226 "Zero shares usually indicates a corrupt URI, or that "
4227 "no servers were connected, but it might also indicate "
4228 "severe corruption. You should perform a filecheck on "
4229 "this object to learn more. The full error message is: "
4230 "no shares (need 3). Last failure: None")
4231 self.failUnlessReallyEqual(exp, body)
4232 d.addCallback(_check_zero_shares)
4235 d.addCallback(lambda ignored:
4236 self.shouldHTTPError("GET 1share",
4237 410, "Gone", "NotEnoughSharesError",
4238 self.GET, self.fileurls["1share"]))
4239 def _check_one_share(body):
4240 self.failIf("<html>" in body, body)
4241 body = " ".join(body.strip().split())
4242 msg = ("NotEnoughSharesError: This indicates that some "
4243 "servers were unavailable, or that shares have been "
4244 "lost to server departure, hard drive failure, or disk "
4245 "corruption. You should perform a filecheck on "
4246 "this object to learn more. The full error message is:"
4247 " ran out of shares: %d complete, %d pending, 0 overdue,"
4248 " 0 unused, need 3. Last failure: None")
4251 self.failUnless(body == msg1 or body == msg2, body)
4252 d.addCallback(_check_one_share)
4254 d.addCallback(lambda ignored:
4255 self.shouldHTTPError("GET imaginary",
4256 404, "Not Found", None,
4257 self.GET, self.fileurls["imaginary"]))
4258 def _missing_child(body):
4259 self.failUnless("No such child: imaginary" in body, body)
4260 d.addCallback(_missing_child)
4262 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
4263 def _check_0shares_dir_html(body):
4264 self.failUnless("<html>" in body, body)
4265 # we should see the regular page, but without the child table or
4267 body = " ".join(body.strip().split())
4268 self.failUnlessIn('href="?t=info">More info on this directory',
4270 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4271 "could not be retrieved, because there were insufficient "
4272 "good shares. This might indicate that no servers were "
4273 "connected, insufficient servers were connected, the URI "
4274 "was corrupt, or that shares have been lost due to server "
4275 "departure, hard drive failure, or disk corruption. You "
4276 "should perform a filecheck on this object to learn more.")
4277 self.failUnlessIn(exp, body)
4278 self.failUnlessIn("No upload forms: directory is unreadable", body)
4279 d.addCallback(_check_0shares_dir_html)
4281 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
4282 def _check_1shares_dir_html(body):
4283 # at some point, we'll split UnrecoverableFileError into 0-shares
4284 # and some-shares like we did for immutable files (since there
4285 # are different sorts of advice to offer in each case). For now,
4286 # they present the same way.
4287 self.failUnless("<html>" in body, body)
4288 body = " ".join(body.strip().split())
4289 self.failUnlessIn('href="?t=info">More info on this directory',
4291 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4292 "could not be retrieved, because there were insufficient "
4293 "good shares. This might indicate that no servers were "
4294 "connected, insufficient servers were connected, the URI "
4295 "was corrupt, or that shares have been lost due to server "
4296 "departure, hard drive failure, or disk corruption. You "
4297 "should perform a filecheck on this object to learn more.")
4298 self.failUnlessIn(exp, body)
4299 self.failUnlessIn("No upload forms: directory is unreadable", body)
4300 d.addCallback(_check_1shares_dir_html)
4302 d.addCallback(lambda ignored:
4303 self.shouldHTTPError("GET dir-0share-json",
4304 410, "Gone", "UnrecoverableFileError",
4306 self.fileurls["dir-0share-json"]))
4307 def _check_unrecoverable_file(body):
4308 self.failIf("<html>" in body, body)
4309 body = " ".join(body.strip().split())
4310 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4311 "could not be retrieved, because there were insufficient "
4312 "good shares. This might indicate that no servers were "
4313 "connected, insufficient servers were connected, the URI "
4314 "was corrupt, or that shares have been lost due to server "
4315 "departure, hard drive failure, or disk corruption. You "
4316 "should perform a filecheck on this object to learn more.")
4317 self.failUnlessReallyEqual(exp, body)
4318 d.addCallback(_check_unrecoverable_file)
4320 d.addCallback(lambda ignored:
4321 self.shouldHTTPError("GET dir-1share-json",
4322 410, "Gone", "UnrecoverableFileError",
4324 self.fileurls["dir-1share-json"]))
4325 d.addCallback(_check_unrecoverable_file)
4327 d.addCallback(lambda ignored:
4328 self.shouldHTTPError("GET imaginary",
4329 404, "Not Found", None,
4330 self.GET, self.fileurls["imaginary"]))
4332 # attach a webapi child that throws a random error, to test how it
4334 w = c0.getServiceNamed("webish")
4335 w.root.putChild("ERRORBOOM", ErrorBoom())
4337 # "Accept: */*" : should get a text/html stack trace
4338 # "Accept: text/plain" : should get a text/plain stack trace
4339 # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
4340 # no Accept header: should get a text/html stack trace
4342 d.addCallback(lambda ignored:
4343 self.shouldHTTPError("GET errorboom_html",
4344 500, "Internal Server Error", None,
4345 self.GET, "ERRORBOOM",
4346 headers={"accept": ["*/*"]}))
4347 def _internal_error_html1(body):
4348 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4349 d.addCallback(_internal_error_html1)
4351 d.addCallback(lambda ignored:
4352 self.shouldHTTPError("GET errorboom_text",
4353 500, "Internal Server Error", None,
4354 self.GET, "ERRORBOOM",
4355 headers={"accept": ["text/plain"]}))
4356 def _internal_error_text2(body):
4357 self.failIf("<html>" in body, body)
4358 self.failUnless(body.startswith("Traceback "), body)
4359 d.addCallback(_internal_error_text2)
4361 CLI_accepts = "text/plain, application/octet-stream"
4362 d.addCallback(lambda ignored:
4363 self.shouldHTTPError("GET errorboom_text",
4364 500, "Internal Server Error", None,
4365 self.GET, "ERRORBOOM",
4366 headers={"accept": [CLI_accepts]}))
4367 def _internal_error_text3(body):
4368 self.failIf("<html>" in body, body)
4369 self.failUnless(body.startswith("Traceback "), body)
4370 d.addCallback(_internal_error_text3)
4372 d.addCallback(lambda ignored:
4373 self.shouldHTTPError("GET errorboom_text",
4374 500, "Internal Server Error", None,
4375 self.GET, "ERRORBOOM"))
4376 def _internal_error_html4(body):
4377 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4378 d.addCallback(_internal_error_html4)
4380 def _flush_errors(res):
4381 # Trial: please ignore the CompletelyUnhandledError in the logs
4382 self.flushLoggedErrors(CompletelyUnhandledError)
4384 d.addBoth(_flush_errors)
4388 class CompletelyUnhandledError(Exception):
class ErrorBoom(rend.Page):
    """A minimal nevow resource that always blows up while rendering.

    Grid.test_exceptions attaches an instance of this at /ERRORBOOM and
    fetches it with various Accept headers, to check how the webapi
    reports a completely unhandled internal error (HTML vs text/plain
    tracebacks, 500 status).
    """
    def beforeRender(self, ctx):
        # Raising here (before any page output is produced) ensures the
        # exception propagates to the webapi's error-reporting path
        # rather than being mixed into a partially-rendered page.
        raise CompletelyUnhandledError("whoops")