2 import os.path, re, urllib, time
4 from StringIO import StringIO
5 from twisted.application import service
6 from twisted.trial import unittest
7 from twisted.internet import defer, reactor
8 from twisted.internet.task import Clock
9 from twisted.web import client, error, http
10 from twisted.python import failure, log
11 from nevow import rend
12 from allmydata import interfaces, uri, webish, dirnode
13 from allmydata.storage.shares import get_share_file
14 from allmydata.storage_client import StorageFarmBroker
15 from allmydata.immutable import upload
16 from allmydata.immutable.downloader.status import DownloadStatus
17 from allmydata.dirnode import DirectoryNode
18 from allmydata.nodemaker import NodeMaker
19 from allmydata.unknown import UnknownNode
20 from allmydata.web import status, common
21 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
22 from allmydata.util import fileutil, base32
23 from allmydata.util.consumer import download_to_data
24 from allmydata.util.netstring import split_netstring
25 from allmydata.util.encodingutil import to_str
26 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
27 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
28 from allmydata.interfaces import IMutableFileNode
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
36 # create a fake uploader/downloader, and a couple of fake dirnodes, then
37 # create a webserver that works against them
# Most of these tests take longer than 240 seconds on Francois's arm box.
timeout = 480

# Caps whose URI prefixes the current code does not recognize; used to
# exercise the webapi's handling of "unknown" (from-the-future) nodes.
# \u263A is WHITE SMILING FACE, included to exercise non-ASCII handling.
unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
45 class FakeStatsProvider:
47 stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    # NodeMaker variant whose factory methods hand back in-memory fake
    # filenodes, so the web tests never touch real storage servers.
    def _create_lit(self, cap):
        # LIT (literal) caps are served by the same fake CHK node class.
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        # re-hydrate a fake mutable node from an existing cap
        return FakeMutableFileNode(None, None, None, None).init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for interface compatibility but ignored here.
        n = FakeMutableFileNode(None, None, None, None)
        return n.create(contents)
61 class FakeUploader(service.Service):
63 def upload(self, uploadable, history=None):
64 d = uploadable.get_size()
65 d.addCallback(lambda size: uploadable.read(size))
68 n = create_chk_filenode(data)
69 results = upload.UploadResults()
70 results.uri = n.get_uri()
72 d.addCallback(_got_data)
74 def get_helper_info(self):
78 ds = DownloadStatus("storage_index", 1234)
81 ds.add_segment_request(0, now)
82 # segnum, when, start,len, decodetime
83 ds.add_segment_delivery(0, now+1, 0, 100, 0.5)
84 ds.add_segment_request(1, now+2)
85 ds.add_segment_error(1, now+3)
86 # two outstanding requests
87 ds.add_segment_request(2, now+4)
88 ds.add_segment_request(3, now+5)
90 # simulate a segment which gets delivered faster than a system clock tick (ticket #1166)
91 ds.add_segment_request(4, now)
92 ds.add_segment_delivery(4, now, 0, 140, 0.5)
94 e = ds.add_dyhb_sent("serverid_a", now)
95 e.finished([1,2], now+1)
96 e = ds.add_dyhb_sent("serverid_b", now+2) # left unfinished
98 e = ds.add_read_event(0, 120, now)
99 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
101 e = ds.add_read_event(120, 30, now+2) # left unfinished
103 e = ds.add_request_sent("serverid_a", 1, 100, 20, now)
104 e.finished(20, now+1)
105 e = ds.add_request_sent("serverid_a", 1, 120, 30, now+1) # left unfinished
107 # make sure that add_read_event() can come first too
108 ds1 = DownloadStatus("storage_index", 1234)
109 e = ds1.add_read_event(0, 120, now)
110 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
116 _all_upload_status = [upload.UploadStatus()]
117 _all_download_status = [build_one_ds()]
118 _all_mapupdate_statuses = [servermap.UpdateStatus()]
119 _all_publish_statuses = [publish.PublishStatus()]
120 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    def list_all_upload_statuses(self):
        # return the canned one-element list of fake UploadStatus objects
        return self._all_upload_status
    def list_all_download_statuses(self):
        # return the canned one-element list of fake DownloadStatus objects
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        # return the canned one-element list of fake UpdateStatus objects
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        # return the canned one-element list of fake PublishStatus objects
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        # return the canned one-element list of fake RetrieveStatus objects
        return self._all_retrieve_statuses
132 def list_all_helper_statuses(self):
135 class FakeClient(Client):
137 # don't upcall to Client.__init__, since we only want to initialize a
139 service.MultiService.__init__(self)
140 self.nodeid = "fake_nodeid"
141 self.nickname = "fake_nickname"
142 self.introducer_furl = "None"
143 self.stats_provider = FakeStatsProvider()
144 self._secret_holder = SecretHolder("lease secret", "convergence secret")
146 self.convergence = "some random string"
147 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
148 self.introducer_client = None
149 self.history = FakeHistory()
150 self.uploader = FakeUploader()
151 self.uploader.setServiceParent(self)
152 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
    def startService(self):
        # skip Client.startService (this fake never initialized the full
        # Client machinery); just start the MultiService children
        return service.MultiService.startService(self)
    def stopService(self):
        # mirror startService: bypass Client and stop children directly
        return service.MultiService.stopService(self)
161 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
163 class WebMixin(object):
165 self.s = FakeClient()
166 self.s.startService()
167 self.staticdir = self.mktemp()
169 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
171 self.ws.setServiceParent(self.s)
172 self.webish_port = self.ws.getPortnum()
173 self.webish_url = self.ws.getURL()
174 assert self.webish_url.endswith("/")
175 self.webish_url = self.webish_url[:-1] # these tests add their own /
177 l = [ self.s.create_dirnode() for x in range(6) ]
178 d = defer.DeferredList(l)
180 self.public_root = res[0][1]
181 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
182 self.public_url = "/uri/" + self.public_root.get_uri()
183 self.private_root = res[1][1]
187 self._foo_uri = foo.get_uri()
188 self._foo_readonly_uri = foo.get_readonly_uri()
189 self._foo_verifycap = foo.get_verify_cap().to_string()
190 # NOTE: we ignore the deferred on all set_uri() calls, because we
191 # know the fake nodes do these synchronously
192 self.public_root.set_uri(u"foo", foo.get_uri(),
193 foo.get_readonly_uri())
195 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
196 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
197 self._bar_txt_verifycap = n.get_verify_cap().to_string()
199 foo.set_uri(u"empty", res[3][1].get_uri(),
200 res[3][1].get_readonly_uri())
201 sub_uri = res[4][1].get_uri()
202 self._sub_uri = sub_uri
203 foo.set_uri(u"sub", sub_uri, sub_uri)
204 sub = self.s.create_node_from_uri(sub_uri)
206 _ign, n, blocking_uri = self.makefile(1)
207 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
209 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
210 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
211 # still think of it as an umlaut
212 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
214 _ign, n, baz_file = self.makefile(2)
215 self._baz_file_uri = baz_file
216 sub.set_uri(u"baz.txt", baz_file, baz_file)
218 _ign, n, self._bad_file_uri = self.makefile(3)
219 # this uri should not be downloadable
220 del FakeCHKFileNode.all_contents[self._bad_file_uri]
223 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
224 rodir.get_readonly_uri())
225 rodir.set_uri(u"nor", baz_file, baz_file)
230 # public/foo/blockingfile
233 # public/foo/sub/baz.txt
235 # public/reedownlee/nor
236 self.NEWFILE_CONTENTS = "newfile contents\n"
238 return foo.get_metadata_for(u"bar.txt")
240 def _got_metadata(metadata):
241 self._bar_txt_metadata = metadata
242 d.addCallback(_got_metadata)
245 def makefile(self, number):
246 contents = "contents of file %s\n" % number
247 n = create_chk_filenode(contents)
248 return contents, n, n.get_uri()
251 return self.s.stopService()
    def failUnlessIsBarDotTxt(self, res):
        # assert that the response body is exactly bar.txt's contents;
        # 'res' is repeated as the failure message for easier debugging
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
256 def failUnlessIsBarJSON(self, res):
257 data = simplejson.loads(res)
258 self.failUnless(isinstance(data, list))
259 self.failUnlessEqual(data[0], "filenode")
260 self.failUnless(isinstance(data[1], dict))
261 self.failIf(data[1]["mutable"])
262 self.failIf("rw_uri" in data[1]) # immutable
263 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
264 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
265 self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))
267 def failUnlessIsFooJSON(self, res):
268 data = simplejson.loads(res)
269 self.failUnless(isinstance(data, list))
270 self.failUnlessEqual(data[0], "dirnode", res)
271 self.failUnless(isinstance(data[1], dict))
272 self.failUnless(data[1]["mutable"])
273 self.failUnless("rw_uri" in data[1]) # mutable
274 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
275 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
276 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
278 kidnames = sorted([unicode(n) for n in data[1]["children"]])
279 self.failUnlessEqual(kidnames,
280 [u"bar.txt", u"blockingfile", u"empty",
281 u"n\u00fc.txt", u"sub"])
282 kids = dict( [(unicode(name),value)
284 in data[1]["children"].iteritems()] )
285 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
286 self.failUnlessIn("metadata", kids[u"sub"][1])
287 self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
288 tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
289 self.failUnlessIn("linkcrtime", tahoe_md)
290 self.failUnlessIn("linkmotime", tahoe_md)
291 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
292 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
293 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
294 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
295 self._bar_txt_verifycap)
296 self.failUnlessIn("metadata", kids[u"bar.txt"][1])
297 self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
298 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
299 self._bar_txt_metadata["tahoe"]["linkcrtime"])
300 self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
303 def GET(self, urlpath, followRedirect=False, return_response=False,
305 # if return_response=True, this fires with (data, statuscode,
306 # respheaders) instead of just data.
307 assert not isinstance(urlpath, unicode)
308 url = self.webish_url + urlpath
309 factory = HTTPClientGETFactory(url, method="GET",
310 followRedirect=followRedirect, **kwargs)
311 reactor.connectTCP("localhost", self.webish_port, factory)
314 return (data, factory.status, factory.response_headers)
316 d.addCallback(_got_data)
317 return factory.deferred
319 def HEAD(self, urlpath, return_response=False, **kwargs):
320 # this requires some surgery, because twisted.web.client doesn't want
321 # to give us back the response headers.
322 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
323 reactor.connectTCP("localhost", self.webish_port, factory)
326 return (data, factory.status, factory.response_headers)
328 d.addCallback(_got_data)
329 return factory.deferred
331 def PUT(self, urlpath, data, **kwargs):
332 url = self.webish_url + urlpath
333 return client.getPage(url, method="PUT", postdata=data, **kwargs)
335 def DELETE(self, urlpath):
336 url = self.webish_url + urlpath
337 return client.getPage(url, method="DELETE")
339 def POST(self, urlpath, followRedirect=False, **fields):
340 sepbase = "boogabooga"
344 form.append('Content-Disposition: form-data; name="_charset"')
348 for name, value in fields.iteritems():
349 if isinstance(value, tuple):
350 filename, value = value
351 form.append('Content-Disposition: form-data; name="%s"; '
352 'filename="%s"' % (name, filename.encode("utf-8")))
354 form.append('Content-Disposition: form-data; name="%s"' % name)
356 if isinstance(value, unicode):
357 value = value.encode("utf-8")
360 assert isinstance(value, str)
367 body = "\r\n".join(form) + "\r\n"
368 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
369 return self.POST2(urlpath, body, headers, followRedirect)
371 def POST2(self, urlpath, body="", headers={}, followRedirect=False):
372 url = self.webish_url + urlpath
373 return client.getPage(url, method="POST", postdata=body,
374 headers=headers, followRedirect=followRedirect)
376 def shouldFail(self, res, expected_failure, which,
377 substring=None, response_substring=None):
378 if isinstance(res, failure.Failure):
379 res.trap(expected_failure)
381 self.failUnless(substring in str(res),
382 "substring '%s' not in '%s'"
383 % (substring, str(res)))
384 if response_substring:
385 self.failUnless(response_substring in res.value.response,
386 "response substring '%s' not in '%s'"
387 % (response_substring, res.value.response))
389 self.fail("%s was supposed to raise %s, not get '%s'" %
390 (which, expected_failure, res))
392 def shouldFail2(self, expected_failure, which, substring,
394 callable, *args, **kwargs):
395 assert substring is None or isinstance(substring, str)
396 assert response_substring is None or isinstance(response_substring, str)
397 d = defer.maybeDeferred(callable, *args, **kwargs)
399 if isinstance(res, failure.Failure):
400 res.trap(expected_failure)
402 self.failUnless(substring in str(res),
403 "%s: substring '%s' not in '%s'"
404 % (which, substring, str(res)))
405 if response_substring:
406 self.failUnless(response_substring in res.value.response,
407 "%s: response substring '%s' not in '%s'"
409 response_substring, res.value.response))
411 self.fail("%s was supposed to raise %s, not get '%s'" %
412 (which, expected_failure, res))
416 def should404(self, res, which):
417 if isinstance(res, failure.Failure):
418 res.trap(error.Error)
419 self.failUnlessReallyEqual(res.value.status, "404")
421 self.fail("%s was supposed to Error(404), not get '%s'" %
424 def should302(self, res, which):
425 if isinstance(res, failure.Failure):
426 res.trap(error.Error)
427 self.failUnlessReallyEqual(res.value.status, "302")
429 self.fail("%s was supposed to Error(302), not get '%s'" %
433 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
434 def test_create(self):
437 def test_welcome(self):
440 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
442 self.s.basedir = 'web/test_welcome'
443 fileutil.make_dirs("web/test_welcome")
444 fileutil.make_dirs("web/test_welcome/private")
446 d.addCallback(_check)
449 def test_provisioning(self):
450 d = self.GET("/provisioning/")
452 self.failUnless('Provisioning Tool' in res)
453 fields = {'filled': True,
454 "num_users": int(50e3),
455 "files_per_user": 1000,
456 "space_per_user": int(1e9),
457 "sharing_ratio": 1.0,
458 "encoding_parameters": "3-of-10-5",
460 "ownership_mode": "A",
461 "download_rate": 100,
466 return self.POST("/provisioning/", **fields)
468 d.addCallback(_check)
470 self.failUnless('Provisioning Tool' in res)
471 self.failUnless("Share space consumed: 167.01TB" in res)
473 fields = {'filled': True,
474 "num_users": int(50e6),
475 "files_per_user": 1000,
476 "space_per_user": int(5e9),
477 "sharing_ratio": 1.0,
478 "encoding_parameters": "25-of-100-50",
479 "num_servers": 30000,
480 "ownership_mode": "E",
481 "drive_failure_model": "U",
483 "download_rate": 1000,
488 return self.POST("/provisioning/", **fields)
489 d.addCallback(_check2)
491 self.failUnless("Share space consumed: huge!" in res)
492 fields = {'filled': True}
493 return self.POST("/provisioning/", **fields)
494 d.addCallback(_check3)
496 self.failUnless("Share space consumed:" in res)
497 d.addCallback(_check4)
500 def test_reliability_tool(self):
502 from allmydata import reliability
503 _hush_pyflakes = reliability
506 raise unittest.SkipTest("reliability tool requires NumPy")
508 d = self.GET("/reliability/")
510 self.failUnless('Reliability Tool' in res)
511 fields = {'drive_lifetime': "8Y",
516 "check_period": "1M",
517 "report_period": "3M",
520 return self.POST("/reliability/", **fields)
522 d.addCallback(_check)
524 self.failUnless('Reliability Tool' in res)
525 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
526 self.failUnless(re.search(r, res), res)
527 d.addCallback(_check2)
530 def test_status(self):
531 h = self.s.get_history()
532 dl_num = h.list_all_download_statuses()[0].get_counter()
533 ul_num = h.list_all_upload_statuses()[0].get_counter()
534 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
535 pub_num = h.list_all_publish_statuses()[0].get_counter()
536 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
537 d = self.GET("/status", followRedirect=True)
539 self.failUnless('Upload and Download Status' in res, res)
540 self.failUnless('"down-%d"' % dl_num in res, res)
541 self.failUnless('"up-%d"' % ul_num in res, res)
542 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
543 self.failUnless('"publish-%d"' % pub_num in res, res)
544 self.failUnless('"retrieve-%d"' % ret_num in res, res)
545 d.addCallback(_check)
546 d.addCallback(lambda res: self.GET("/status/?t=json"))
547 def _check_json(res):
548 data = simplejson.loads(res)
549 self.failUnless(isinstance(data, dict))
550 #active = data["active"]
551 # TODO: test more. We need a way to fake an active operation
553 d.addCallback(_check_json)
555 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
557 self.failUnless("File Download Status" in res, res)
558 d.addCallback(_check_dl)
559 d.addCallback(lambda res: self.GET("/status/down-%d?t=json" % dl_num))
560 def _check_dl_json(res):
561 data = simplejson.loads(res)
562 self.failUnless(isinstance(data, dict))
563 d.addCallback(_check_dl_json)
564 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
566 self.failUnless("File Upload Status" in res, res)
567 d.addCallback(_check_ul)
568 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
569 def _check_mapupdate(res):
570 self.failUnless("Mutable File Servermap Update Status" in res, res)
571 d.addCallback(_check_mapupdate)
572 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
573 def _check_publish(res):
574 self.failUnless("Mutable File Publish Status" in res, res)
575 d.addCallback(_check_publish)
576 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
577 def _check_retrieve(res):
578 self.failUnless("Mutable File Retrieve Status" in res, res)
579 d.addCallback(_check_retrieve)
583 def test_status_numbers(self):
584 drrm = status.DownloadResultsRendererMixin()
585 self.failUnlessReallyEqual(drrm.render_time(None, None), "")
586 self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
587 self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
588 self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
589 self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
590 self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
591 self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
592 self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
593 self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
595 urrm = status.UploadResultsRendererMixin()
596 self.failUnlessReallyEqual(urrm.render_time(None, None), "")
597 self.failUnlessReallyEqual(urrm.render_time(None, 2.5), "2.50s")
598 self.failUnlessReallyEqual(urrm.render_time(None, 0.25), "250ms")
599 self.failUnlessReallyEqual(urrm.render_time(None, 0.0021), "2.1ms")
600 self.failUnlessReallyEqual(urrm.render_time(None, 0.000123), "123us")
601 self.failUnlessReallyEqual(urrm.render_rate(None, None), "")
602 self.failUnlessReallyEqual(urrm.render_rate(None, 2500000), "2.50MBps")
603 self.failUnlessReallyEqual(urrm.render_rate(None, 30100), "30.1kBps")
604 self.failUnlessReallyEqual(urrm.render_rate(None, 123), "123Bps")
606 def test_GET_FILEURL(self):
607 d = self.GET(self.public_url + "/foo/bar.txt")
608 d.addCallback(self.failUnlessIsBarDotTxt)
611 def test_GET_FILEURL_range(self):
612 headers = {"range": "bytes=1-10"}
613 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
614 return_response=True)
615 def _got((res, status, headers)):
616 self.failUnlessReallyEqual(int(status), 206)
617 self.failUnless(headers.has_key("content-range"))
618 self.failUnlessReallyEqual(headers["content-range"][0],
619 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
620 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
624 def test_GET_FILEURL_partial_range(self):
625 headers = {"range": "bytes=5-"}
626 length = len(self.BAR_CONTENTS)
627 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
628 return_response=True)
629 def _got((res, status, headers)):
630 self.failUnlessReallyEqual(int(status), 206)
631 self.failUnless(headers.has_key("content-range"))
632 self.failUnlessReallyEqual(headers["content-range"][0],
633 "bytes 5-%d/%d" % (length-1, length))
634 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
638 def test_GET_FILEURL_partial_end_range(self):
639 headers = {"range": "bytes=-5"}
640 length = len(self.BAR_CONTENTS)
641 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
642 return_response=True)
643 def _got((res, status, headers)):
644 self.failUnlessReallyEqual(int(status), 206)
645 self.failUnless(headers.has_key("content-range"))
646 self.failUnlessReallyEqual(headers["content-range"][0],
647 "bytes %d-%d/%d" % (length-5, length-1, length))
648 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
652 def test_GET_FILEURL_partial_range_overrun(self):
653 headers = {"range": "bytes=100-200"}
654 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
655 "416 Requested Range not satisfiable",
656 "First beyond end of file",
657 self.GET, self.public_url + "/foo/bar.txt",
661 def test_HEAD_FILEURL_range(self):
662 headers = {"range": "bytes=1-10"}
663 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
664 return_response=True)
665 def _got((res, status, headers)):
666 self.failUnlessReallyEqual(res, "")
667 self.failUnlessReallyEqual(int(status), 206)
668 self.failUnless(headers.has_key("content-range"))
669 self.failUnlessReallyEqual(headers["content-range"][0],
670 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
674 def test_HEAD_FILEURL_partial_range(self):
675 headers = {"range": "bytes=5-"}
676 length = len(self.BAR_CONTENTS)
677 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
678 return_response=True)
679 def _got((res, status, headers)):
680 self.failUnlessReallyEqual(int(status), 206)
681 self.failUnless(headers.has_key("content-range"))
682 self.failUnlessReallyEqual(headers["content-range"][0],
683 "bytes 5-%d/%d" % (length-1, length))
687 def test_HEAD_FILEURL_partial_end_range(self):
688 headers = {"range": "bytes=-5"}
689 length = len(self.BAR_CONTENTS)
690 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
691 return_response=True)
692 def _got((res, status, headers)):
693 self.failUnlessReallyEqual(int(status), 206)
694 self.failUnless(headers.has_key("content-range"))
695 self.failUnlessReallyEqual(headers["content-range"][0],
696 "bytes %d-%d/%d" % (length-5, length-1, length))
700 def test_HEAD_FILEURL_partial_range_overrun(self):
701 headers = {"range": "bytes=100-200"}
702 d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
703 "416 Requested Range not satisfiable",
705 self.HEAD, self.public_url + "/foo/bar.txt",
709 def test_GET_FILEURL_range_bad(self):
710 headers = {"range": "BOGUS=fizbop-quarnak"}
711 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
712 return_response=True)
713 def _got((res, status, headers)):
714 self.failUnlessReallyEqual(int(status), 200)
715 self.failUnless(not headers.has_key("content-range"))
716 self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
720 def test_HEAD_FILEURL(self):
721 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
722 def _got((res, status, headers)):
723 self.failUnlessReallyEqual(res, "")
724 self.failUnlessReallyEqual(headers["content-length"][0],
725 str(len(self.BAR_CONTENTS)))
726 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
730 def test_GET_FILEURL_named(self):
731 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
732 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
733 d = self.GET(base + "/@@name=/blah.txt")
734 d.addCallback(self.failUnlessIsBarDotTxt)
735 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
736 d.addCallback(self.failUnlessIsBarDotTxt)
737 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
738 d.addCallback(self.failUnlessIsBarDotTxt)
739 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
740 d.addCallback(self.failUnlessIsBarDotTxt)
741 save_url = base + "?save=true&filename=blah.txt"
742 d.addCallback(lambda res: self.GET(save_url))
743 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
744 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
745 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
746 u_url = base + "?save=true&filename=" + u_fn_e
747 d.addCallback(lambda res: self.GET(u_url))
748 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
751 def test_PUT_FILEURL_named_bad(self):
752 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
753 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
755 "/file can only be used with GET or HEAD",
756 self.PUT, base + "/@@name=/blah.txt", "")
759 def test_GET_DIRURL_named_bad(self):
760 base = "/file/%s" % urllib.quote(self._foo_uri)
761 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
764 self.GET, base + "/@@name=/blah.txt")
767 def test_GET_slash_file_bad(self):
768 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
770 "/file must be followed by a file-cap and a name",
774 def test_GET_unhandled_URI_named(self):
775 contents, n, newuri = self.makefile(12)
776 verifier_cap = n.get_verify_cap().to_string()
777 base = "/file/%s" % urllib.quote(verifier_cap)
778 # client.create_node_from_uri() can't handle verify-caps
779 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
780 "400 Bad Request", "is not a file-cap",
784 def test_GET_unhandled_URI(self):
785 contents, n, newuri = self.makefile(12)
786 verifier_cap = n.get_verify_cap().to_string()
787 base = "/uri/%s" % urllib.quote(verifier_cap)
788 # client.create_node_from_uri() can't handle verify-caps
789 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
791 "GET unknown URI type: can only do t=info",
795 def test_GET_FILE_URI(self):
796 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
798 d.addCallback(self.failUnlessIsBarDotTxt)
801 def test_GET_FILE_URI_badchild(self):
802 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
803 errmsg = "Files have no children, certainly not named 'boguschild'"
804 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
805 "400 Bad Request", errmsg,
809 def test_PUT_FILE_URI_badchild(self):
810 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
811 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
812 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
813 "400 Bad Request", errmsg,
817 # TODO: version of this with a Unicode filename
818 def test_GET_FILEURL_save(self):
819 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
820 return_response=True)
821 def _got((res, statuscode, headers)):
822 content_disposition = headers["content-disposition"][0]
823 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
824 self.failUnlessIsBarDotTxt(res)
828 def test_GET_FILEURL_missing(self):
829 d = self.GET(self.public_url + "/foo/missing")
830 d.addBoth(self.should404, "test_GET_FILEURL_missing")
833 def test_PUT_overwrite_only_files(self):
834 # create a directory, put a file in that directory.
835 contents, n, filecap = self.makefile(8)
836 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
837 d.addCallback(lambda res:
838 self.PUT(self.public_url + "/foo/dir/file1.txt",
839 self.NEWFILE_CONTENTS))
840 # try to overwrite the file with replace=only-files
842 d.addCallback(lambda res:
843 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
845 d.addCallback(lambda res:
846 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
847 "There was already a child by that name, and you asked me "
849 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
853 def test_PUT_NEWFILEURL(self):
854 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
855 # TODO: we lose the response code, so we can't check this
856 #self.failUnlessReallyEqual(responsecode, 201)
857 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
858 d.addCallback(lambda res:
859 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
860 self.NEWFILE_CONTENTS))
863 def test_PUT_NEWFILEURL_not_mutable(self):
864 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
865 self.NEWFILE_CONTENTS)
866 # TODO: we lose the response code, so we can't check this
867 #self.failUnlessReallyEqual(responsecode, 201)
868 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
869 d.addCallback(lambda res:
870 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
871 self.NEWFILE_CONTENTS))
874 def test_PUT_NEWFILEURL_range_bad(self):
875 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
876 target = self.public_url + "/foo/new.txt"
877 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
878 "501 Not Implemented",
879 "Content-Range in PUT not yet supported",
880 # (and certainly not for immutable files)
881 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
883 d.addCallback(lambda res:
884 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
887 def test_PUT_NEWFILEURL_mutable(self):
888 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
889 self.NEWFILE_CONTENTS)
890 # TODO: we lose the response code, so we can't check this
891 #self.failUnlessReallyEqual(responsecode, 201)
893 u = uri.from_string_mutable_filenode(res)
894 self.failUnless(u.is_mutable())
895 self.failIf(u.is_readonly())
897 d.addCallback(_check_uri)
898 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
899 d.addCallback(lambda res:
900 self.failUnlessMutableChildContentsAre(self._foo_node,
902 self.NEWFILE_CONTENTS))
905 def test_PUT_NEWFILEURL_mutable_toobig(self):
906 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
907 "413 Request Entity Too Large",
908 "SDMF is limited to one segment, and 10001 > 10000",
910 self.public_url + "/foo/new.txt?mutable=true",
911 "b" * (self.s.MUTABLE_SIZELIMIT+1))
914 def test_PUT_NEWFILEURL_replace(self):
915 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
916 # TODO: we lose the response code, so we can't check this
917 #self.failUnlessReallyEqual(responsecode, 200)
918 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
919 d.addCallback(lambda res:
920 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
921 self.NEWFILE_CONTENTS))
924 def test_PUT_NEWFILEURL_bad_t(self):
925 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
926 "PUT to a file: bad t=bogus",
927 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
931 def test_PUT_NEWFILEURL_no_replace(self):
932 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
933 self.NEWFILE_CONTENTS)
934 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
936 "There was already a child by that name, and you asked me "
940 def test_PUT_NEWFILEURL_mkdirs(self):
941 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
943 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
944 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
945 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
946 d.addCallback(lambda res:
947 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
948 self.NEWFILE_CONTENTS))
951 def test_PUT_NEWFILEURL_blocked(self):
952 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
953 self.NEWFILE_CONTENTS)
954 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
956 "Unable to create directory 'blockingfile': a file was in the way")
959 def test_PUT_NEWFILEURL_emptyname(self):
960 # an empty pathname component (i.e. a double-slash) is disallowed
961 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
963 "The webapi does not allow empty pathname components",
964 self.PUT, self.public_url + "/foo//new.txt", "")
967 def test_DELETE_FILEURL(self):
968 d = self.DELETE(self.public_url + "/foo/bar.txt")
969 d.addCallback(lambda res:
970 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
973 def test_DELETE_FILEURL_missing(self):
974 d = self.DELETE(self.public_url + "/foo/missing")
975 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
978 def test_DELETE_FILEURL_missing2(self):
979 d = self.DELETE(self.public_url + "/missing/missing")
980 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
983 def failUnlessHasBarDotTxtMetadata(self, res):
984 data = simplejson.loads(res)
985 self.failUnless(isinstance(data, list))
986 self.failUnlessIn("metadata", data[1])
987 self.failUnlessIn("tahoe", data[1]["metadata"])
988 self.failUnlessIn("linkcrtime", data[1]["metadata"]["tahoe"])
989 self.failUnlessIn("linkmotime", data[1]["metadata"]["tahoe"])
990 self.failUnlessReallyEqual(data[1]["metadata"]["tahoe"]["linkcrtime"],
991 self._bar_txt_metadata["tahoe"]["linkcrtime"])
993 def test_GET_FILEURL_json(self):
994 # twisted.web.http.parse_qs ignores any query args without an '=', so
995 # I can't do "GET /path?json", I have to do "GET /path/t=json"
996 # instead. This may make it tricky to emulate the S3 interface
998 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
1000 self.failUnlessIsBarJSON(data)
1001 self.failUnlessHasBarDotTxtMetadata(data)
1003 d.addCallback(_check1)
1006 def test_GET_FILEURL_json_missing(self):
1007 d = self.GET(self.public_url + "/foo/missing?json")
1008 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
1011 def test_GET_FILEURL_uri(self):
1012 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
1014 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1015 d.addCallback(_check)
1016 d.addCallback(lambda res:
1017 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
1019 # for now, for files, uris and readonly-uris are the same
1020 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1021 d.addCallback(_check2)
1024 def test_GET_FILEURL_badtype(self):
1025 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
1028 self.public_url + "/foo/bar.txt?t=bogus")
1031 def test_CSS_FILE(self):
1032 d = self.GET("/tahoe_css", followRedirect=True)
1034 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
1035 self.failUnless(CSS_STYLE.search(res), res)
1036 d.addCallback(_check)
1039 def test_GET_FILEURL_uri_missing(self):
1040 d = self.GET(self.public_url + "/foo/missing?t=uri")
1041 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
1044 def test_GET_DIRECTORY_html_banner(self):
1045 d = self.GET(self.public_url + "/foo", followRedirect=True)
1047 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>',res)
1048 d.addCallback(_check)
    def test_GET_DIRURL(self):
        # Exercise the HTML directory listing: relative links back to the
        # Welcome page, FILE/DIR rows, the delete form, read-only and
        # immutable directory rendering, and the empty-directory page.
        # NOTE(review): this excerpt has dropped lines (at least the ROOT
        # assignment, the `_check*` callback headers, several string-list
        # items, the loop break/else, and the final `return d`) -- restore
        # the block from version control before relying on it.
        # the addSlash means we get a redirect here
        # from /uri/$URI/foo/ , we need ../../../ to get back to the root
        d = self.GET(self.public_url + "/foo", followRedirect=True)
        self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
        # the FILE reference points to a URI, but it should end in bar.txt
        bar_url = ("%s/file/%s/@@named=/bar.txt" %
                   (ROOT, urllib.quote(self._bar_txt_uri)))
        get_bar = "".join([r'<td>FILE</td>',
                           r'<a href="%s">bar.txt</a>' % bar_url,
                           r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
        self.failUnless(re.search(get_bar, res), res)
        for line in res.split("\n"):
            # find the line that contains the delete button for bar.txt
            if ("form action" in line and
                'value="delete"' in line and
                'value="bar.txt"' in line):
                # the form target should use a relative URL
                foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
                self.failUnless(('action="%s"' % foo_url) in line, line)
                # and the when_done= should too
                #done_url = urllib.quote(???)
                #self.failUnless(('name="when_done" value="%s"' % done_url)
            self.fail("unable to find delete-bar.txt line", res)
        # the DIR reference just points to a URI
        sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
        get_sub = ((r'<td>DIR</td>')
                   +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
        self.failUnless(re.search(get_sub, res), res)
        d.addCallback(_check)

        # look at a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/reedownlee", followRedirect=True))
        self.failUnless("(read-only)" in res, res)
        self.failIf("Upload a file" in res, res)
        d.addCallback(_check2)

        # and at a directory that contains a readonly directory
        d.addCallback(lambda res:
                      self.GET(self.public_url, followRedirect=True))
        self.failUnless(re.search('<td>DIR-RO</td>'
                                  r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
        d.addCallback(_check3)

        # and an empty directory
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
        self.failUnless("directory is empty" in res, res)
        MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
        self.failUnless(MKDIR_BUTTON_RE.search(res), res)
        d.addCallback(_check4)

        # and at a literal directory
        tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
        d.addCallback(lambda res:
                      self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
        self.failUnless('(immutable)' in res, res)
        self.failUnless(re.search('<td>FILE</td>'
                                  r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
        d.addCallback(_check5)
1127 def test_GET_DIRURL_badtype(self):
1128 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1132 self.public_url + "/foo?t=bogus")
1135 def test_GET_DIRURL_json(self):
1136 d = self.GET(self.public_url + "/foo?t=json")
1137 d.addCallback(self.failUnlessIsFooJSON)
1141 def test_POST_DIRURL_manifest_no_ophandle(self):
1142 d = self.shouldFail2(error.Error,
1143 "test_POST_DIRURL_manifest_no_ophandle",
1145 "slow operation requires ophandle=",
1146 self.POST, self.public_url, t="start-manifest")
1149 def test_POST_DIRURL_manifest(self):
1150 d = defer.succeed(None)
1151 def getman(ignored, output):
1152 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1153 followRedirect=True)
1154 d.addCallback(self.wait_for_operation, "125")
1155 d.addCallback(self.get_operation_results, "125", output)
1157 d.addCallback(getman, None)
1158 def _got_html(manifest):
1159 self.failUnless("Manifest of SI=" in manifest)
1160 self.failUnless("<td>sub</td>" in manifest)
1161 self.failUnless(self._sub_uri in manifest)
1162 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1163 d.addCallback(_got_html)
1165 # both t=status and unadorned GET should be identical
1166 d.addCallback(lambda res: self.GET("/operations/125"))
1167 d.addCallback(_got_html)
1169 d.addCallback(getman, "html")
1170 d.addCallback(_got_html)
1171 d.addCallback(getman, "text")
1172 def _got_text(manifest):
1173 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1174 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1175 d.addCallback(_got_text)
1176 d.addCallback(getman, "JSON")
1178 data = res["manifest"]
1180 for (path_list, cap) in data:
1181 got[tuple(path_list)] = cap
1182 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1183 self.failUnless((u"sub",u"baz.txt") in got)
1184 self.failUnless("finished" in res)
1185 self.failUnless("origin" in res)
1186 self.failUnless("storage-index" in res)
1187 self.failUnless("verifycaps" in res)
1188 self.failUnless("stats" in res)
1189 d.addCallback(_got_json)
1192 def test_POST_DIRURL_deepsize_no_ophandle(self):
1193 d = self.shouldFail2(error.Error,
1194 "test_POST_DIRURL_deepsize_no_ophandle",
1196 "slow operation requires ophandle=",
1197 self.POST, self.public_url, t="start-deep-size")
1200 def test_POST_DIRURL_deepsize(self):
1201 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1202 followRedirect=True)
1203 d.addCallback(self.wait_for_operation, "126")
1204 d.addCallback(self.get_operation_results, "126", "json")
1205 def _got_json(data):
1206 self.failUnlessReallyEqual(data["finished"], True)
1208 self.failUnless(size > 1000)
1209 d.addCallback(_got_json)
1210 d.addCallback(self.get_operation_results, "126", "text")
1212 mo = re.search(r'^size: (\d+)$', res, re.M)
1213 self.failUnless(mo, res)
1214 size = int(mo.group(1))
1215 # with directories, the size varies.
1216 self.failUnless(size > 1000)
1217 d.addCallback(_got_text)
1220 def test_POST_DIRURL_deepstats_no_ophandle(self):
1221 d = self.shouldFail2(error.Error,
1222 "test_POST_DIRURL_deepstats_no_ophandle",
1224 "slow operation requires ophandle=",
1225 self.POST, self.public_url, t="start-deep-stats")
    def test_POST_DIRURL_deepstats(self):
        # Start a deep-stats operation under ophandle 127, poll it to
        # completion, and compare the JSON statistics against known values
        # for the test grid's /foo tree.
        # NOTE(review): this excerpt has dropped lines (at least one
        # `expected` entry, the dict's closing brace, the argument tuple of
        # the failUnlessReallyEqual message, the expected histogram value,
        # and the final `return d`) -- restore from version control.
        d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "127")
        d.addCallback(self.get_operation_results, "127", "json")
        def _got_json(stats):
            expected = {"count-immutable-files": 3,
                        "count-mutable-files": 0,
                        "count-literal-files": 0,
                        "count-directories": 3,
                        "size-immutable-files": 57,
                        "size-literal-files": 0,
                        #"size-directories": 1912, # varies
                        #"largest-directory": 1590,
                        "largest-directory-children": 5,
                        "largest-immutable-file": 19,
            for k,v in expected.iteritems():
                self.failUnlessReallyEqual(stats[k], v,
                                           "stats[%s] was %s, not %s" %
            self.failUnlessReallyEqual(stats["size-files-histogram"],
        d.addCallback(_got_json)
1255 def test_POST_DIRURL_stream_manifest(self):
1256 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1258 self.failUnless(res.endswith("\n"))
1259 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1260 self.failUnlessReallyEqual(len(units), 7)
1261 self.failUnlessEqual(units[-1]["type"], "stats")
1263 self.failUnlessEqual(first["path"], [])
1264 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1265 self.failUnlessEqual(first["type"], "directory")
1266 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1267 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1268 self.failIfEqual(baz["storage-index"], None)
1269 self.failIfEqual(baz["verifycap"], None)
1270 self.failIfEqual(baz["repaircap"], None)
1272 d.addCallback(_check)
1275 def test_GET_DIRURL_uri(self):
1276 d = self.GET(self.public_url + "/foo?t=uri")
1278 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1279 d.addCallback(_check)
1282 def test_GET_DIRURL_readonly_uri(self):
1283 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1285 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1286 d.addCallback(_check)
1289 def test_PUT_NEWDIRURL(self):
1290 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1291 d.addCallback(lambda res:
1292 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1293 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1294 d.addCallback(self.failUnlessNodeKeysAre, [])
1297 def test_POST_NEWDIRURL(self):
1298 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1299 d.addCallback(lambda res:
1300 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1301 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1302 d.addCallback(self.failUnlessNodeKeysAre, [])
1305 def test_POST_NEWDIRURL_emptyname(self):
1306 # an empty pathname component (i.e. a double-slash) is disallowed
1307 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1309 "The webapi does not allow empty pathname components, i.e. a double slash",
1310 self.POST, self.public_url + "//?t=mkdir")
1313 def test_POST_NEWDIRURL_initial_children(self):
1314 (newkids, caps) = self._create_initial_children()
1315 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1316 simplejson.dumps(newkids))
1318 n = self.s.create_node_from_uri(uri.strip())
1319 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1320 d2.addCallback(lambda ign:
1321 self.failUnlessROChildURIIs(n, u"child-imm",
1323 d2.addCallback(lambda ign:
1324 self.failUnlessRWChildURIIs(n, u"child-mutable",
1326 d2.addCallback(lambda ign:
1327 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1329 d2.addCallback(lambda ign:
1330 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1331 caps['unknown_rocap']))
1332 d2.addCallback(lambda ign:
1333 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1334 caps['unknown_rwcap']))
1335 d2.addCallback(lambda ign:
1336 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1337 caps['unknown_immcap']))
1338 d2.addCallback(lambda ign:
1339 self.failUnlessRWChildURIIs(n, u"dirchild",
1341 d2.addCallback(lambda ign:
1342 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1344 d2.addCallback(lambda ign:
1345 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1346 caps['emptydircap']))
1348 d.addCallback(_check)
1349 d.addCallback(lambda res:
1350 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1351 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1352 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1353 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1354 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1357 def test_POST_NEWDIRURL_immutable(self):
1358 (newkids, caps) = self._create_immutable_children()
1359 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1360 simplejson.dumps(newkids))
1362 n = self.s.create_node_from_uri(uri.strip())
1363 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1364 d2.addCallback(lambda ign:
1365 self.failUnlessROChildURIIs(n, u"child-imm",
1367 d2.addCallback(lambda ign:
1368 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1369 caps['unknown_immcap']))
1370 d2.addCallback(lambda ign:
1371 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1373 d2.addCallback(lambda ign:
1374 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1376 d2.addCallback(lambda ign:
1377 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1378 caps['emptydircap']))
1380 d.addCallback(_check)
1381 d.addCallback(lambda res:
1382 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1383 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1384 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1385 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1386 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1387 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1388 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1389 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1390 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1391 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1392 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1393 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1394 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1395 d.addErrback(self.explain_web_error)
1398 def test_POST_NEWDIRURL_immutable_bad(self):
1399 (newkids, caps) = self._create_initial_children()
1400 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1402 "needed to be immutable but was not",
1404 self.public_url + "/foo/newdir?t=mkdir-immutable",
1405 simplejson.dumps(newkids))
1408 def test_PUT_NEWDIRURL_exists(self):
1409 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1410 d.addCallback(lambda res:
1411 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1412 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1413 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1416 def test_PUT_NEWDIRURL_blocked(self):
1417 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1418 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1420 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1421 d.addCallback(lambda res:
1422 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1423 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1424 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1427 def test_PUT_NEWDIRURL_mkdir_p(self):
1428 d = defer.succeed(None)
1429 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1430 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1431 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1432 def mkdir_p(mkpnode):
1433 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1435 def made_subsub(ssuri):
1436 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1437 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
1439 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1441 d.addCallback(made_subsub)
1443 d.addCallback(mkdir_p)
1446 def test_PUT_NEWDIRURL_mkdirs(self):
1447 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1448 d.addCallback(lambda res:
1449 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1450 d.addCallback(lambda res:
1451 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1452 d.addCallback(lambda res:
1453 self._foo_node.get_child_at_path(u"subdir/newdir"))
1454 d.addCallback(self.failUnlessNodeKeysAre, [])
1457 def test_DELETE_DIRURL(self):
1458 d = self.DELETE(self.public_url + "/foo")
1459 d.addCallback(lambda res:
1460 self.failIfNodeHasChild(self.public_root, u"foo"))
1463 def test_DELETE_DIRURL_missing(self):
1464 d = self.DELETE(self.public_url + "/foo/missing")
1465 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1466 d.addCallback(lambda res:
1467 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1470 def test_DELETE_DIRURL_missing2(self):
1471 d = self.DELETE(self.public_url + "/missing")
1472 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
1475 def dump_root(self):
1477 w = webish.DirnodeWalkerMixin()
1478 def visitor(childpath, childnode, metadata):
1480 d = w.walk(self.public_root, visitor)
1483 def failUnlessNodeKeysAre(self, node, expected_keys):
1484 for k in expected_keys:
1485 assert isinstance(k, unicode)
1487 def _check(children):
1488 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1489 d.addCallback(_check)
1491 def failUnlessNodeHasChild(self, node, name):
1492 assert isinstance(name, unicode)
1494 def _check(children):
1495 self.failUnless(name in children)
1496 d.addCallback(_check)
1498 def failIfNodeHasChild(self, node, name):
1499 assert isinstance(name, unicode)
1501 def _check(children):
1502 self.failIf(name in children)
1503 d.addCallback(_check)
1506 def failUnlessChildContentsAre(self, node, name, expected_contents):
1507 assert isinstance(name, unicode)
1508 d = node.get_child_at_path(name)
1509 d.addCallback(lambda node: download_to_data(node))
1510 def _check(contents):
1511 self.failUnlessReallyEqual(contents, expected_contents)
1512 d.addCallback(_check)
1515 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1516 assert isinstance(name, unicode)
1517 d = node.get_child_at_path(name)
1518 d.addCallback(lambda node: node.download_best_version())
1519 def _check(contents):
1520 self.failUnlessReallyEqual(contents, expected_contents)
1521 d.addCallback(_check)
1524 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1525 assert isinstance(name, unicode)
1526 d = node.get_child_at_path(name)
1528 self.failUnless(child.is_unknown() or not child.is_readonly())
1529 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1530 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1531 expected_ro_uri = self._make_readonly(expected_uri)
1533 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1534 d.addCallback(_check)
1537 def failUnlessROChildURIIs(self, node, name, expected_uri):
1538 assert isinstance(name, unicode)
1539 d = node.get_child_at_path(name)
1541 self.failUnless(child.is_unknown() or child.is_readonly())
1542 self.failUnlessReallyEqual(child.get_write_uri(), None)
1543 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1544 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1545 d.addCallback(_check)
1548 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1549 assert isinstance(name, unicode)
1550 d = node.get_child_at_path(name)
1552 self.failUnless(child.is_unknown() or not child.is_readonly())
1553 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1554 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1555 expected_ro_uri = self._make_readonly(got_uri)
1557 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1558 d.addCallback(_check)
1561 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1562 assert isinstance(name, unicode)
1563 d = node.get_child_at_path(name)
1565 self.failUnless(child.is_unknown() or child.is_readonly())
1566 self.failUnlessReallyEqual(child.get_write_uri(), None)
1567 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1568 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1569 d.addCallback(_check)
1572 def failUnlessCHKURIHasContents(self, got_uri, contents):
1573 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
1575 def test_POST_upload(self):
1576 d = self.POST(self.public_url + "/foo", t="upload",
1577 file=("new.txt", self.NEWFILE_CONTENTS))
1579 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1580 d.addCallback(lambda res:
1581 self.failUnlessChildContentsAre(fn, u"new.txt",
1582 self.NEWFILE_CONTENTS))
1585 def test_POST_upload_unicode(self):
1586 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1587 d = self.POST(self.public_url + "/foo", t="upload",
1588 file=(filename, self.NEWFILE_CONTENTS))
1590 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1591 d.addCallback(lambda res:
1592 self.failUnlessChildContentsAre(fn, filename,
1593 self.NEWFILE_CONTENTS))
1594 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1595 d.addCallback(lambda res: self.GET(target_url))
1596 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1597 self.NEWFILE_CONTENTS,
1601 def test_POST_upload_unicode_named(self):
1602 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1603 d = self.POST(self.public_url + "/foo", t="upload",
1605 file=("overridden", self.NEWFILE_CONTENTS))
1607 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1608 d.addCallback(lambda res:
1609 self.failUnlessChildContentsAre(fn, filename,
1610 self.NEWFILE_CONTENTS))
1611 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1612 d.addCallback(lambda res: self.GET(target_url))
1613 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1614 self.NEWFILE_CONTENTS,
1618 def test_POST_upload_no_link(self):
1619 d = self.POST("/uri", t="upload",
1620 file=("new.txt", self.NEWFILE_CONTENTS))
1621 def _check_upload_results(page):
1622 # this should be a page which describes the results of the upload
1623 # that just finished.
1624 self.failUnless("Upload Results:" in page)
1625 self.failUnless("URI:" in page)
1626 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1627 mo = uri_re.search(page)
1628 self.failUnless(mo, page)
1629 new_uri = mo.group(1)
1631 d.addCallback(_check_upload_results)
1632 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1635 def test_POST_upload_no_link_whendone(self):
1636 d = self.POST("/uri", t="upload", when_done="/",
1637 file=("new.txt", self.NEWFILE_CONTENTS))
1638 d.addBoth(self.shouldRedirect, "/")
1641 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1642 d = defer.maybeDeferred(callable, *args, **kwargs)
1644 if isinstance(res, failure.Failure):
1645 res.trap(error.PageRedirect)
1646 statuscode = res.value.status
1647 target = res.value.location
1648 return checker(statuscode, target)
1649 self.fail("%s: callable was supposed to redirect, not return '%s'"
1654 def test_POST_upload_no_link_whendone_results(self):
1655 def check(statuscode, target):
1656 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1657 self.failUnless(target.startswith(self.webish_url), target)
1658 return client.getPage(target, method="GET")
1659 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1661 self.POST, "/uri", t="upload",
1662 when_done="/uri/%(uri)s",
1663 file=("new.txt", self.NEWFILE_CONTENTS))
1664 d.addCallback(lambda res:
1665 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))
1668 def test_POST_upload_no_link_mutable(self):
1669 d = self.POST("/uri", t="upload", mutable="true",
1670 file=("new.txt", self.NEWFILE_CONTENTS))
1671 def _check(filecap):
1672 filecap = filecap.strip()
1673 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1674 self.filecap = filecap
1675 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1676 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1677 n = self.s.create_node_from_uri(filecap)
1678 return n.download_best_version()
1679 d.addCallback(_check)
1681 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1682 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1683 d.addCallback(_check2)
1685 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1686 return self.GET("/file/%s" % urllib.quote(self.filecap))
1687 d.addCallback(_check3)
1689 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1690 d.addCallback(_check4)
1693 def test_POST_upload_no_link_mutable_toobig(self):
1694 d = self.shouldFail2(error.Error,
1695 "test_POST_upload_no_link_mutable_toobig",
1696 "413 Request Entity Too Large",
1697 "SDMF is limited to one segment, and 10001 > 10000",
1699 "/uri", t="upload", mutable="true",
1701 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def test_POST_upload_mutable(self):
        # End-to-end exercise of mutable files through the webapi: create via
        # POST t=upload&mutable=true, overwrite via POST and PUT (URI must
        # stay stable), then inspect the HTML listing, the JSON forms,
        # t=uri/t=readonly-uri, /uri/CAP retrieval, HEAD headers, and the
        # over-size error path.
        # NOTE(review): this excerpt has dropped lines (at least the `fn`
        # binding, the `_got`/`_got2`/`_got3` callback headers, several
        # argument continuation lines, and the final `return d`) -- restore
        # the block from version control before relying on it.
        # this creates a mutable file
        d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self._mutable_node = newnode
        self._mutable_uri = newnode.get_uri()

        # now upload it again and make sure that the URI doesn't change
        NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
        d.addCallback(lambda res:
                      self.POST(self.public_url + "/foo", t="upload",
                                file=("new.txt", NEWER_CONTENTS)))
        d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got2)

        # upload a second time, using PUT instead of POST
        NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",

        # finally list the directory, since mutable files are displayed
        # slightly differently

        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/",
                               followRedirect=True))
        def _check_page(res):
            # TODO: assert more about the contents
            self.failUnless("SSK" in res)
        d.addCallback(_check_page)

        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
        self.failUnless(IMutableFileNode.providedBy(newnode))
        self.failUnless(newnode.is_mutable())
        self.failIf(newnode.is_readonly())
        self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got3)

        # look at the JSON form of the enclosing directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/?t=json",
                               followRedirect=True))
        def _check_page_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "dirnode")
            children = dict( [(unicode(name),value)
                              in parsed[1]["children"].iteritems()] )
            self.failUnless(u"new.txt" in children)
            new_json = children[u"new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
            ro_uri = self._mutable_node.get_readonly().to_string()
            self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
        d.addCallback(_check_page_json)

        # and the JSON form of the file
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=json"))
        def _check_file_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
            ro_uri = self._mutable_node.get_readonly().to_string()
            self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
        d.addCallback(_check_file_json)

        # and look at t=uri and t=readonly-uri
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=uri"))
        d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
        def _check_ro_uri(res):
            ro_uri = self._mutable_node.get_readonly().to_string()
            self.failUnlessReallyEqual(res, ro_uri)
        d.addCallback(_check_ro_uri)

        # make sure we can get to it from /uri/URI
        d.addCallback(lambda res:
                      self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
        d.addCallback(lambda res:
                      self.failUnlessReallyEqual(res, NEW2_CONTENTS))

        # and that HEAD computes the size correctly
        d.addCallback(lambda res:
                      self.HEAD(self.public_url + "/foo/new.txt",
                                return_response=True))
        def _got_headers((res, status, headers)):
            self.failUnlessReallyEqual(res, "")
            self.failUnlessReallyEqual(headers["content-length"][0],
                                       str(len(NEW2_CONTENTS)))
            self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got_headers)

        # make sure that size errors are displayed correctly for overwrite
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error,
                                       "test_POST_upload_mutable-toobig",
                                       "413 Request Entity Too Large",
                                       "SDMF is limited to one segment, and 10001 > 10000",
                                       self.public_url + "/foo", t="upload",
                                       "b" * (self.s.MUTABLE_SIZELIMIT+1)),

        d.addErrback(self.dump_error)
# NOTE(review): this excerpt has gaps in the embedded line numbering (missing
# original lines, e.g. the callable/file= arguments and trailing 'return d');
# code is kept verbatim.
# An SDMF mutable upload bigger than one segment must be rejected with 413.
1844 def test_POST_upload_mutable_toobig(self):
1845 d = self.shouldFail2(error.Error,
1846 "test_POST_upload_mutable_toobig",
1847 "413 Request Entity Too Large",
1848 "SDMF is limited to one segment, and 10001 > 10000",
1850 self.public_url + "/foo",
1851 t="upload", mutable="true",
1853 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# Debugging errback: surfaces the HTTP response body that getPage hides
# inside the exception's .response attribute.
1856 def dump_error(self, f):
1857 # if the web server returns an error code (like 400 Bad Request),
1858 # web.client.getPage puts the HTTP response body into the .response
1859 # attribute of the exception object that it gives back. It does not
1860 # appear in the Failure's repr(), so the ERROR that trial displays
1861 # will be rather terse and unhelpful. addErrback this method to the
1862 # end of your chain to get more information out of these errors.
1863 if f.check(error.Error):
1864 print "web.error.Error:"
1866 print f.value.response
# POST t=upload over an existing child replaces it (default replace=true).
# NOTE(review): 'fn' is assigned on a line missing from this excerpt
# (presumably fn = self._foo_node) — confirm against the full file.
1869 def test_POST_upload_replace(self):
1870 d = self.POST(self.public_url + "/foo", t="upload",
1871 file=("bar.txt", self.NEWFILE_CONTENTS))
1873 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
1874 d.addCallback(lambda res:
1875 self.failUnlessChildContentsAre(fn, u"bar.txt",
1876 self.NEWFILE_CONTENTS))
# replace=false is fine when the child name does not already exist.
1879 def test_POST_upload_no_replace_ok(self):
1880 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1881 file=("new.txt", self.NEWFILE_CONTENTS))
1882 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1883 d.addCallback(lambda res: self.failUnlessReallyEqual(res,
1884 self.NEWFILE_CONTENTS))
# replace=false as a query argument must refuse to overwrite bar.txt,
# and the original contents must survive.
1887 def test_POST_upload_no_replace_queryarg(self):
1888 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1889 file=("bar.txt", self.NEWFILE_CONTENTS))
1890 d.addBoth(self.shouldFail, error.Error,
1891 "POST_upload_no_replace_queryarg",
1893 "There was already a child by that name, and you asked me "
1894 "to not replace it")
1895 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1896 d.addCallback(self.failUnlessIsBarDotTxt)
# Same refusal when replace=false is sent as a form field instead.
1899 def test_POST_upload_no_replace_field(self):
1900 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1901 file=("bar.txt", self.NEWFILE_CONTENTS))
1902 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1904 "There was already a child by that name, and you asked me "
1905 "to not replace it")
1906 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1907 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done=/THERE must redirect there after the upload completes.
1910 def test_POST_upload_whendone(self):
1911 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1912 file=("new.txt", self.NEWFILE_CONTENTS))
1913 d.addBoth(self.shouldRedirect, "/THERE")
1915 d.addCallback(lambda res:
1916 self.failUnlessChildContentsAre(fn, u"new.txt",
1917 self.NEWFILE_CONTENTS))
# name= field supplies the child name when file= carries bare contents.
1920 def test_POST_upload_named(self):
1922 d = self.POST(self.public_url + "/foo", t="upload",
1923 name="new.txt", file=self.NEWFILE_CONTENTS)
1924 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1925 d.addCallback(lambda res:
1926 self.failUnlessChildContentsAre(fn, u"new.txt",
1927 self.NEWFILE_CONTENTS))
# name= containing a slash is rejected and the directory is untouched.
1930 def test_POST_upload_named_badfilename(self):
1931 d = self.POST(self.public_url + "/foo", t="upload",
1932 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
1933 d.addBoth(self.shouldFail, error.Error,
1934 "test_POST_upload_named_badfilename",
1936 "name= may not contain a slash",
1938 # make sure that nothing was added
1939 d.addCallback(lambda res:
1940 self.failUnlessNodeKeysAre(self._foo_node,
1941 [u"bar.txt", u"blockingfile",
1942 u"empty", u"n\u00fc.txt",
# t=check on a healthy file: HTML report, when_done redirect, return_to
# link, and JSON output. NOTE(review): the 'def _check(res):' /
# 'def _check3(res):' headers are on lines missing from this excerpt.
1946 def test_POST_FILEURL_check(self):
1947 bar_url = self.public_url + "/foo/bar.txt"
1948 d = self.POST(bar_url, t="check")
1950 self.failUnless("Healthy :" in res)
1951 d.addCallback(_check)
1952 redir_url = "http://allmydata.org/TARGET"
1953 def _check2(statuscode, target):
1954 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1955 self.failUnlessReallyEqual(target, redir_url)
1956 d.addCallback(lambda res:
1957 self.shouldRedirect2("test_POST_FILEURL_check",
1961 when_done=redir_url))
1962 d.addCallback(lambda res:
1963 self.POST(bar_url, t="check", return_to=redir_url))
1965 self.failUnless("Healthy :" in res)
1966 self.failUnless("Return to file" in res)
1967 self.failUnless(redir_url in res)
1968 d.addCallback(_check3)
1970 d.addCallback(lambda res:
1971 self.POST(bar_url, t="check", output="JSON"))
1972 def _check_json(res):
1973 data = simplejson.loads(res)
1974 self.failUnless("storage-index" in data)
1975 self.failUnless(data["results"]["healthy"])
1976 d.addCallback(_check_json)
# Same checks with repair=true on a file.
1980 def test_POST_FILEURL_check_and_repair(self):
1981 bar_url = self.public_url + "/foo/bar.txt"
1982 d = self.POST(bar_url, t="check", repair="true")
1984 self.failUnless("Healthy :" in res)
1985 d.addCallback(_check)
1986 redir_url = "http://allmydata.org/TARGET"
1987 def _check2(statuscode, target):
1988 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1989 self.failUnlessReallyEqual(target, redir_url)
1990 d.addCallback(lambda res:
1991 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
1994 t="check", repair="true",
1995 when_done=redir_url))
1996 d.addCallback(lambda res:
1997 self.POST(bar_url, t="check", return_to=redir_url))
1999 self.failUnless("Healthy :" in res)
2000 self.failUnless("Return to file" in res)
2001 self.failUnless(redir_url in res)
2002 d.addCallback(_check3)
# t=check on a directory URL; note "Return to file/directory" wording.
2005 def test_POST_DIRURL_check(self):
2006 foo_url = self.public_url + "/foo/"
2007 d = self.POST(foo_url, t="check")
2009 self.failUnless("Healthy :" in res, res)
2010 d.addCallback(_check)
2011 redir_url = "http://allmydata.org/TARGET"
2012 def _check2(statuscode, target):
2013 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2014 self.failUnlessReallyEqual(target, redir_url)
2015 d.addCallback(lambda res:
2016 self.shouldRedirect2("test_POST_DIRURL_check",
2020 when_done=redir_url))
2021 d.addCallback(lambda res:
2022 self.POST(foo_url, t="check", return_to=redir_url))
2024 self.failUnless("Healthy :" in res, res)
2025 self.failUnless("Return to file/directory" in res)
2026 self.failUnless(redir_url in res)
2027 d.addCallback(_check3)
2029 d.addCallback(lambda res:
2030 self.POST(foo_url, t="check", output="JSON"))
2031 def _check_json(res):
2032 data = simplejson.loads(res)
2033 self.failUnless("storage-index" in data)
2034 self.failUnless(data["results"]["healthy"])
2035 d.addCallback(_check_json)
# Directory check with repair=true.
2039 def test_POST_DIRURL_check_and_repair(self):
2040 foo_url = self.public_url + "/foo/"
2041 d = self.POST(foo_url, t="check", repair="true")
2043 self.failUnless("Healthy :" in res, res)
2044 d.addCallback(_check)
2045 redir_url = "http://allmydata.org/TARGET"
2046 def _check2(statuscode, target):
2047 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2048 self.failUnlessReallyEqual(target, redir_url)
2049 d.addCallback(lambda res:
2050 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
2053 t="check", repair="true",
2054 when_done=redir_url))
2055 d.addCallback(lambda res:
2056 self.POST(foo_url, t="check", return_to=redir_url))
2058 self.failUnless("Healthy :" in res)
2059 self.failUnless("Return to file/directory" in res)
2060 self.failUnless(redir_url in res)
2061 d.addCallback(_check3)
# Poll /operations/<ophandle>?t=status until "finished" is true, stalling
# 1 second between polls. NOTE(review): the GET call and the callback
# scaffolding around the JSON parse are on lines missing from this excerpt.
2064 def wait_for_operation(self, ignored, ophandle):
2065 url = "/operations/" + ophandle
2066 url += "?t=status&output=JSON"
2069 data = simplejson.loads(res)
2070 if not data["finished"]:
2071 d = self.stall(delay=1.0)
2072 d.addCallback(self.wait_for_operation, ophandle)
# Fetch the results page for a finished operation; parse as JSON when
# output=json was requested, otherwise return the raw body.
2078 def get_operation_results(self, ignored, ophandle, output=None):
2079 url = "/operations/" + ophandle
2082 url += "&output=" + output
2085 if output and output.lower() == "json":
2086 return simplejson.loads(res)
# start-deep-check without an ophandle= must be refused.
2091 def test_POST_DIRURL_deepcheck_no_ophandle(self):
2092 d = self.shouldFail2(error.Error,
2093 "test_POST_DIRURL_deepcheck_no_ophandle",
2095 "slow operation requires ophandle=",
2096 self.POST, self.public_url, t="start-deep-check")
# Full deep-check cycle: redirect to /operations/123, poll to completion,
# inspect JSON and HTML results, and per-storage-index detail pages.
2099 def test_POST_DIRURL_deepcheck(self):
2100 def _check_redirect(statuscode, target):
2101 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2102 self.failUnless(target.endswith("/operations/123"))
2103 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
2104 self.POST, self.public_url,
2105 t="start-deep-check", ophandle="123")
2106 d.addCallback(self.wait_for_operation, "123")
2107 def _check_json(data):
2108 self.failUnlessReallyEqual(data["finished"], True)
2109 self.failUnlessReallyEqual(data["count-objects-checked"], 8)
2110 self.failUnlessReallyEqual(data["count-objects-healthy"], 8)
2111 d.addCallback(_check_json)
2112 d.addCallback(self.get_operation_results, "123", "html")
2113 def _check_html(res):
2114 self.failUnless("Objects Checked: <span>8</span>" in res)
2115 self.failUnless("Objects Healthy: <span>8</span>" in res)
2116 d.addCallback(_check_html)
2118 d.addCallback(lambda res:
2119 self.GET("/operations/123/"))
2120 d.addCallback(_check_html) # should be the same as without the slash
2122 d.addCallback(lambda res:
2123 self.shouldFail2(error.Error, "one", "404 Not Found",
2124 "No detailed results for SI bogus",
2125 self.GET, "/operations/123/bogus"))
2127 foo_si = self._foo_node.get_storage_index()
2128 foo_si_s = base32.b2a(foo_si)
2129 d.addCallback(lambda res:
2130 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2131 def _check_foo_json(res):
2132 data = simplejson.loads(res)
2133 self.failUnlessEqual(data["storage-index"], foo_si_s)
2134 self.failUnless(data["results"]["healthy"])
2135 d.addCallback(_check_foo_json)
# Deep-check with repair on an already-healthy tree: all 8 objects healthy
# before and after, zero repairs attempted.
2138 def test_POST_DIRURL_deepcheck_and_repair(self):
2139 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2140 ophandle="124", output="json", followRedirect=True)
2141 d.addCallback(self.wait_for_operation, "124")
2142 def _check_json(data):
2143 self.failUnlessReallyEqual(data["finished"], True)
2144 self.failUnlessReallyEqual(data["count-objects-checked"], 8)
2145 self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 8)
2146 self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
2147 self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
2148 self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
2149 self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
2150 self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
2151 self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 8)
2152 self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
2153 self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
2154 d.addCallback(_check_json)
2155 d.addCallback(self.get_operation_results, "124", "html")
2156 def _check_html(res):
2157 self.failUnless("Objects Checked: <span>8</span>" in res)
2159 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
2160 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2161 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2163 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2164 self.failUnless("Repairs Successful: <span>0</span>" in res)
2165 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2167 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
2168 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2169 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2170 d.addCallback(_check_html)
# An unrecognized t= on a file URL is a 400.
2173 def test_POST_FILEURL_bad_t(self):
2174 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2175 "POST to file: bad t=bogus",
2176 self.POST, self.public_url + "/foo/bar.txt",
# t=mkdir with name= creates an empty child directory.
2180 def test_POST_mkdir(self): # return value?
2181 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2182 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2183 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir-with-children seeds the new directory from a JSON request body.
2186 def test_POST_mkdir_initial_children(self):
2187 (newkids, caps) = self._create_initial_children()
2188 d = self.POST2(self.public_url +
2189 "/foo?t=mkdir-with-children&name=newdir",
2190 simplejson.dumps(newkids))
2191 d.addCallback(lambda res:
2192 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2193 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2194 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2195 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2196 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# t=mkdir-immutable accepts only immutable children, all linked read-only.
2199 def test_POST_mkdir_immutable(self):
2200 (newkids, caps) = self._create_immutable_children()
2201 d = self.POST2(self.public_url +
2202 "/foo?t=mkdir-immutable&name=newdir",
2203 simplejson.dumps(newkids))
2204 d.addCallback(lambda res:
2205 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2206 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2207 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2208 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2209 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2210 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2211 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2212 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2213 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2214 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2215 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2216 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2217 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
# t=mkdir-immutable given mutable children must be rejected.
2220 def test_POST_mkdir_immutable_bad(self):
2221 (newkids, caps) = self._create_initial_children()
2222 d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
2224 "needed to be immutable but was not",
2227 "/foo?t=mkdir-immutable&name=newdir",
2228 simplejson.dumps(newkids))
# t=mkdir as a query argument on the target path itself.
2231 def test_POST_mkdir_2(self):
2232 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2233 d.addCallback(lambda res:
2234 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2235 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2236 d.addCallback(self.failUnlessNodeKeysAre, [])
# Intermediate directories (bardir) are created on demand.
2239 def test_POST_mkdirs_2(self):
2240 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2241 d.addCallback(lambda res:
2242 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2243 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2244 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2245 d.addCallback(self.failUnlessNodeKeysAre, [])
# /uri?t=mkdir returns the new directory writecap in the body.
2248 def test_POST_mkdir_no_parentdir_noredirect(self):
2249 d = self.POST("/uri?t=mkdir")
2250 def _after_mkdir(res):
2251 uri.DirectoryURI.init_from_string(res)
2252 d.addCallback(_after_mkdir)
# Same, with t=mkdir passed as a form field (welcome-page style).
2255 def test_POST_mkdir_no_parentdir_noredirect2(self):
2256 # make sure form-based arguments (as on the welcome page) still work
2257 d = self.POST("/uri", t="mkdir")
2258 def _after_mkdir(res):
2259 uri.DirectoryURI.init_from_string(res)
2260 d.addCallback(_after_mkdir)
2261 d.addErrback(self.explain_web_error)
# redirect_to_result=true yields a 303 to uri/URI:DIR2:...
2264 def test_POST_mkdir_no_parentdir_redirect(self):
2265 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2266 d.addBoth(self.shouldRedirect, None, statuscode='303')
2267 def _check_target(target):
2268 target = urllib.unquote(target)
2269 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2270 d.addCallback(_check_target)
# Same redirect behavior with form-field arguments.
2273 def test_POST_mkdir_no_parentdir_redirect2(self):
2274 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2275 d.addBoth(self.shouldRedirect, None, statuscode='303')
2276 def _check_target(target):
2277 target = urllib.unquote(target)
2278 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2279 d.addCallback(_check_target)
2280 d.addErrback(self.explain_web_error)
# Derive the read-only form of a cap string. NOTE(review): lines between
# the two statements are missing from this excerpt (presumably a
# None-guard for caps with no read-only form) — confirm against the file.
2283 def _make_readonly(self, u):
2284 ro_uri = uri.from_string(u).get_readonly()
2287 return ro_uri.to_string()
# Build a children-spec dict (JSON-shaped) with one of every kind of
# child: immutable/mutable/ro files, unknown caps, and directories.
# Returns (newkids, caps-by-name) for use by mkdir-with-children tests.
2289 def _create_initial_children(self):
2290 contents, n, filecap1 = self.makefile(12)
2291 md1 = {"metakey1": "metavalue1"}
2292 filecap2 = make_mutable_file_uri()
2293 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2294 filecap3 = node3.get_readonly_uri()
2295 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2296 dircap = DirectoryNode(node4, None, None).get_uri()
2297 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2298 emptydircap = "URI:DIR2-LIT:"
2299 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2300 "ro_uri": self._make_readonly(filecap1),
2301 "metadata": md1, }],
2302 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2303 "ro_uri": self._make_readonly(filecap2)}],
2304 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2305 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2306 "ro_uri": unknown_rocap}],
2307 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2308 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2309 u"dirchild": ["dirnode", {"rw_uri": dircap,
2310 "ro_uri": self._make_readonly(dircap)}],
2311 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2312 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2314 return newkids, {'filecap1': filecap1,
2315 'filecap2': filecap2,
2316 'filecap3': filecap3,
2317 'unknown_rwcap': unknown_rwcap,
2318 'unknown_rocap': unknown_rocap,
2319 'unknown_immcap': unknown_immcap,
2321 'litdircap': litdircap,
2322 'emptydircap': emptydircap}
# As above, but every child is immutable — suitable for mkdir-immutable.
2324 def _create_immutable_children(self):
2325 contents, n, filecap1 = self.makefile(12)
2326 md1 = {"metakey1": "metavalue1"}
2327 tnode = create_chk_filenode("immutable directory contents\n"*10)
2328 dnode = DirectoryNode(tnode, None, None)
2329 assert not dnode.is_mutable()
2330 immdircap = dnode.get_uri()
2331 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2332 emptydircap = "URI:DIR2-LIT:"
2333 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2334 "metadata": md1, }],
2335 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2336 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2337 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2338 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2340 return newkids, {'filecap1': filecap1,
2341 'unknown_immcap': unknown_immcap,
2342 'immdircap': immdircap,
2343 'litdircap': litdircap,
2344 'emptydircap': emptydircap}
# /uri?t=mkdir-with-children: the returned writecap must resolve to a
# directory containing every child from the JSON spec.
2346 def test_POST_mkdir_no_parentdir_initial_children(self):
2347 (newkids, caps) = self._create_initial_children()
2348 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2349 def _after_mkdir(res):
2350 self.failUnless(res.startswith("URI:DIR"), res)
2351 n = self.s.create_node_from_uri(res)
2352 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2353 d2.addCallback(lambda ign:
2354 self.failUnlessROChildURIIs(n, u"child-imm",
2356 d2.addCallback(lambda ign:
2357 self.failUnlessRWChildURIIs(n, u"child-mutable",
2359 d2.addCallback(lambda ign:
2360 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2362 d2.addCallback(lambda ign:
2363 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2364 caps['unknown_rwcap']))
2365 d2.addCallback(lambda ign:
2366 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2367 caps['unknown_rocap']))
2368 d2.addCallback(lambda ign:
2369 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2370 caps['unknown_immcap']))
2371 d2.addCallback(lambda ign:
2372 self.failUnlessRWChildURIIs(n, u"dirchild",
2375 d.addCallback(_after_mkdir)
# Plain t=mkdir must reject a request body carrying children.
2378 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2379 # the regular /uri?t=mkdir operation is specified to ignore its body.
2380 # Only t=mkdir-with-children pays attention to it.
2381 (newkids, caps) = self._create_initial_children()
2382 d = self.shouldHTTPError("POST t=mkdir unexpected children",
2384 "t=mkdir does not accept children=, "
2385 "try t=mkdir-with-children instead",
2386 self.POST2, "/uri?t=mkdir", # without children
2387 simplejson.dumps(newkids))
# Unknown t= on /uri is a 400 listing the accepted operations.
2390 def test_POST_noparent_bad(self):
2391 d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
2392 "/uri accepts only PUT, PUT?t=mkdir, "
2393 "POST?t=upload, and POST?t=mkdir",
2394 self.POST, "/uri?t=bogus")
# /uri?t=mkdir-immutable with all-immutable children succeeds and every
# child is linked read-only.
2397 def test_POST_mkdir_no_parentdir_immutable(self):
2398 (newkids, caps) = self._create_immutable_children()
2399 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2400 def _after_mkdir(res):
2401 self.failUnless(res.startswith("URI:DIR"), res)
2402 n = self.s.create_node_from_uri(res)
2403 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2404 d2.addCallback(lambda ign:
2405 self.failUnlessROChildURIIs(n, u"child-imm",
2407 d2.addCallback(lambda ign:
2408 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2409 caps['unknown_immcap']))
2410 d2.addCallback(lambda ign:
2411 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2413 d2.addCallback(lambda ign:
2414 self.failUnlessROChildURIIs(n, u"dirchild-lit",
2416 d2.addCallback(lambda ign:
2417 self.failUnlessROChildURIIs(n, u"dirchild-empty",
2418 caps['emptydircap']))
2420 d.addCallback(_after_mkdir)
# /uri?t=mkdir-immutable with mutable children must be rejected.
2423 def test_POST_mkdir_no_parentdir_immutable_bad(self):
2424 (newkids, caps) = self._create_initial_children()
2425 d = self.shouldFail2(error.Error,
2426 "test_POST_mkdir_no_parentdir_immutable_bad",
2428 "needed to be immutable but was not",
2430 "/uri?t=mkdir-immutable",
2431 simplejson.dumps(newkids))
# Scrape the welcome page's "Create a directory" form with a regex, then
# replay the form as a POST and expect a 303 redirect.
# NOTE(review): the initial GET and the re.I/re.S flags line are on lines
# missing from this excerpt.
2434 def test_welcome_page_mkdir_button(self):
2435 # Fetch the welcome page.
2437 def _after_get_welcome_page(res):
2438 MKDIR_BUTTON_RE = re.compile(
2439 '<form action="([^"]*)" method="post".*?'
2440 '<input type="hidden" name="t" value="([^"]*)" />'
2441 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
2442 '<input type="submit" value="Create a directory" />',
2444 mo = MKDIR_BUTTON_RE.search(res)
2445 formaction = mo.group(1)
2447 formaname = mo.group(3)
2448 formavalue = mo.group(4)
2449 return (formaction, formt, formaname, formavalue)
2450 d.addCallback(_after_get_welcome_page)
2451 def _after_parse_form(res):
2452 (formaction, formt, formaname, formavalue) = res
2453 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
2454 d.addCallback(_after_parse_form)
2455 d.addBoth(self.shouldRedirect, None, statuscode='303')
# mkdir over an existing name replaces it by default.
2458 def test_POST_mkdir_replace(self): # return value?
2459 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
2460 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2461 d.addCallback(self.failUnlessNodeKeysAre, [])
# replace=false query argument refuses to clobber the existing 'sub'.
2464 def test_POST_mkdir_no_replace_queryarg(self): # return value?
2465 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
2466 d.addBoth(self.shouldFail, error.Error,
2467 "POST_mkdir_no_replace_queryarg",
2469 "There was already a child by that name, and you asked me "
2470 "to not replace it")
2471 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2472 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Same refusal with replace=false as a form field.
2475 def test_POST_mkdir_no_replace_field(self): # return value?
2476 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
2478 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
2480 "There was already a child by that name, and you asked me "
2481 "to not replace it")
2482 d.addCallback(lambda res: self._foo_node.get(u"sub"))
2483 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# when_done as a form field redirects after the mkdir.
2486 def test_POST_mkdir_whendone_field(self):
2487 d = self.POST(self.public_url + "/foo",
2488 t="mkdir", name="newdir", when_done="/THERE")
2489 d.addBoth(self.shouldRedirect, "/THERE")
2490 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2491 d.addCallback(self.failUnlessNodeKeysAre, [])
# when_done as a query argument behaves the same.
2494 def test_POST_mkdir_whendone_queryarg(self):
2495 d = self.POST(self.public_url + "/foo?when_done=/THERE",
2496 t="mkdir", name="newdir")
2497 d.addBoth(self.shouldRedirect, "/THERE")
2498 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2499 d.addCallback(self.failUnlessNodeKeysAre, [])
# Unknown t= on a directory is a 400.
2502 def test_POST_bad_t(self):
2503 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2504 "POST to a directory with bad t=BOGUS",
2505 self.POST, self.public_url + "/foo", t="BOGUS")
# t=set_children atomically links three pre-uploaded files via a JSON
# request body. NOTE(review): parts of the JSON literal (opening brace,
# ro_uri lines, metadata braces) are on lines missing from this excerpt;
# kept verbatim.
2508 def test_POST_set_children(self, command_name="set_children"):
2509 contents9, n9, newuri9 = self.makefile(9)
2510 contents10, n10, newuri10 = self.makefile(10)
2511 contents11, n11, newuri11 = self.makefile(11)
2514 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
2517 "ctime": 1002777696.7564139,
2518 "mtime": 1002777696.7564139
2521 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
2524 "ctime": 1002777696.7564139,
2525 "mtime": 1002777696.7564139
2528 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
2531 "ctime": 1002777696.7564139,
2532 "mtime": 1002777696.7564139
2535 }""" % (newuri9, newuri10, newuri11)
2537 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
2539 d = client.getPage(url, method="POST", postdata=reqbody)
2541 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
2542 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
2543 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
2545 d.addCallback(_then)
2546 d.addErrback(self.dump_error)
def test_POST_set_children_with_hyphen(self):
    """The hyphenated t=set-children alias must behave exactly like
    the underscore form exercised by test_POST_set_children."""
    return self.test_POST_set_children(command_name="set-children")
# t=uri links an already-uploaded cap into the directory under name=.
2552 def test_POST_link_uri(self):
2553 contents, n, newuri = self.makefile(8)
2554 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2555 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
2556 d.addCallback(lambda res:
2557 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# Linking over an existing name replaces it by default.
2561 def test_POST_link_uri_replace(self):
2562 contents, n, newuri = self.makefile(8)
2563 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2564 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
2565 d.addCallback(lambda res:
2566 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# An unknown cap may not be linked into a write slot.
2570 def test_POST_link_uri_unknown_bad(self):
2571 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
2572 d.addBoth(self.shouldFail, error.Error,
2573 "POST_link_uri_unknown_bad",
2575 "unknown cap in a write slot")
# Unknown read-only and immutable caps are acceptable to link.
2578 def test_POST_link_uri_unknown_ro_good(self):
2579 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
2580 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
2583 def test_POST_link_uri_unknown_imm_good(self):
2584 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
2585 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
# replace=false query argument refuses to overwrite bar.txt via t=uri.
2588 def test_POST_link_uri_no_replace_queryarg(self):
2589 contents, n, newuri = self.makefile(8)
2590 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2591 name="bar.txt", uri=newuri)
2592 d.addBoth(self.shouldFail, error.Error,
2593 "POST_link_uri_no_replace_queryarg",
2595 "There was already a child by that name, and you asked me "
2596 "to not replace it")
2597 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2598 d.addCallback(self.failUnlessIsBarDotTxt)
# Same refusal with replace=false as a form field.
2601 def test_POST_link_uri_no_replace_field(self):
2602 contents, n, newuri = self.makefile(8)
2603 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2604 name="bar.txt", uri=newuri)
2605 d.addBoth(self.shouldFail, error.Error,
2606 "POST_link_uri_no_replace_field",
2608 "There was already a child by that name, and you asked me "
2609 "to not replace it")
2610 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2611 d.addCallback(self.failUnlessIsBarDotTxt)
# t=delete removes the named child from the directory.
2614 def test_POST_delete(self):
2615 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2616 d.addCallback(lambda res: self._foo_node.list())
2617 def _check(children):
2618 self.failIf(u"bar.txt" in children)
2619 d.addCallback(_check)
# t=rename moves bar.txt to wibble.txt, preserving contents and metadata.
2622 def test_POST_rename_file(self):
2623 d = self.POST(self.public_url + "/foo", t="rename",
2624 from_name="bar.txt", to_name='wibble.txt')
2625 d.addCallback(lambda res:
2626 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2627 d.addCallback(lambda res:
2628 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2629 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2630 d.addCallback(self.failUnlessIsBarDotTxt)
2631 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2632 d.addCallback(self.failUnlessIsBarJSON)
# Renaming a file onto its own name is a harmless no-op.
2635 def test_POST_rename_file_redundant(self):
2636 d = self.POST(self.public_url + "/foo", t="rename",
2637 from_name="bar.txt", to_name='bar.txt')
2638 d.addCallback(lambda res:
2639 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2640 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2641 d.addCallback(self.failUnlessIsBarDotTxt)
2642 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2643 d.addCallback(self.failUnlessIsBarJSON)
# Rename may replace an existing directory child by default.
2646 def test_POST_rename_file_replace(self):
2647 # rename a file and replace a directory with it
2648 d = self.POST(self.public_url + "/foo", t="rename",
2649 from_name="bar.txt", to_name='empty')
2650 d.addCallback(lambda res:
2651 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2652 d.addCallback(lambda res:
2653 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2654 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2655 d.addCallback(self.failUnlessIsBarDotTxt)
2656 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2657 d.addCallback(self.failUnlessIsBarJSON)
# replace=false query argument refuses; 'empty' stays an empty dirnode.
2660 def test_POST_rename_file_no_replace_queryarg(self):
2661 # rename a file and replace a directory with it
2662 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2663 from_name="bar.txt", to_name='empty')
2664 d.addBoth(self.shouldFail, error.Error,
2665 "POST_rename_file_no_replace_queryarg",
2667 "There was already a child by that name, and you asked me "
2668 "to not replace it")
2669 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2670 d.addCallback(self.failUnlessIsEmptyJSON)
# Same refusal with replace=false as a form field.
2673 def test_POST_rename_file_no_replace_field(self):
2674 # rename a file and replace a directory with it
2675 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2676 from_name="bar.txt", to_name='empty')
2677 d.addBoth(self.shouldFail, error.Error,
2678 "POST_rename_file_no_replace_field",
2680 "There was already a child by that name, and you asked me "
2681 "to not replace it")
2682 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2683 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json rendering of an empty dirnode."""
    parsed = simplejson.loads(res)
    # the t=json form is a [nodetype, info] pair
    self.failUnlessEqual(parsed[0], "dirnode", parsed)
    self.failUnlessReallyEqual(len(parsed[1]["children"]), 0)
# A rename target containing a slash must be rejected, leaving the
# original child in place.
# NOTE(review): lines 2696 (likely "400 Bad Request") and 2698 (likely the
# closing paren of the shouldFail call) are missing from this listing.
2691 def test_POST_rename_file_slash_fail(self):
2692 d = self.POST(self.public_url + "/foo", t="rename",
2693 from_name="bar.txt", to_name='kirk/spock.txt')
2694 d.addBoth(self.shouldFail, error.Error,
2695 "test_POST_rename_file_slash_fail",
2697 "to_name= may not contain a slash",
2699 d.addCallback(lambda res:
2700 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# Renaming a directory (not just a file): 'foo' becomes 'plunk', and the
# renamed node still renders the original foo JSON.
# NOTE(review): the trailing `return d` (line 2712) is not in this listing.
2703 def test_POST_rename_dir(self):
2704 d = self.POST(self.public_url, t="rename",
2705 from_name="foo", to_name='plunk')
2706 d.addCallback(lambda res:
2707 self.failIfNodeHasChild(self.public_root, u"foo"))
2708 d.addCallback(lambda res:
2709 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2710 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2711 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """Assert that `res` is a PageRedirect failure and return its location.

    If `target` is not None, the redirect must point at webish_url+target.
    If `statuscode` is not None, the redirect must use that HTTP status.
    """
    if not isinstance(res, failure.Failure):
        to_where = "somewhere" if target is None else ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessReallyEqual(res.value.status, statuscode,
                                   "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        expected = self.webish_url + target
        self.failUnlessReallyEqual(res.value.location, expected,
                                   "%s: wrong target" % which)
    return res.value.location
# GET /uri?uri=CAP should redirect to /uri/$CAP, preserving extra query
# arguments, and following the redirect should serve the file/dir itself.
# NOTE(review): line 2738 (presumably `d = self.GET(base)`) is missing, so
# the first addBoth has no visible Deferred source in this listing.
2734 def test_GET_URI_form(self):
2735 base = "/uri?uri=%s" % self._bar_txt_uri
2736 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2737 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2739 d.addBoth(self.shouldRedirect, targetbase)
2740 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2741 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2742 d.addCallback(lambda res: self.GET(base+"&t=json"))
2743 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2744 d.addCallback(self.log, "about to get file by uri")
2745 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2746 d.addCallback(self.failUnlessIsBarDotTxt)
2747 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2748 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2749 followRedirect=True))
2750 d.addCallback(self.failUnlessIsFooJSON)
2751 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= argument must produce 400 Bad Request.
# NOTE(review): the callable arguments to shouldFail2 (lines 2758-2759)
# are missing from this listing.
2755 def test_GET_URI_form_bad(self):
2756 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2757 "400 Bad Request", "GET /uri requires uri=",
# The t=rename-form page should embed the when_done target and the
# from_name of the file being renamed.
# NOTE(review): line 2764 (presumably `def _check(res):`) is missing, so
# the assertions below lack their visible enclosing function header.
2761 def test_GET_rename_form(self):
2762 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2763 followRedirect=True)
2765 self.failUnless('name="when_done" value="."' in res, res)
2766 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2767 d.addCallback(_check)
# Pass-through logging helper used inside Deferred chains; the debug print
# is kept commented out. NOTE(review): the body (likely a log.msg call and
# `return res`, lines 2772+) is missing from this listing.
2770 def log(self, res, msg):
2771 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/$FILECAP serves the file body; filename= and save= query args
# must not change the returned contents.
# NOTE(review): line 2777 (presumably `d = self.GET(base)`) is missing.
2775 def test_GET_URI_URL(self):
2776 base = "/uri/%s" % self._bar_txt_uri
2778 d.addCallback(self.failUnlessIsBarDotTxt)
2779 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2780 d.addCallback(self.failUnlessIsBarDotTxt)
2781 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2782 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/$DIRCAP?t=json returns the directory's JSON rendering.
# NOTE(review): line 2787 (presumably `d = self.GET(base)`) is missing.
2785 def test_GET_URI_URL_dir(self):
2786 base = "/uri/%s?t=json" % self._foo_uri
2788 d.addCallback(self.failUnlessIsFooJSON)
# Fetching a cap whose shares are unrecoverable should yield HTTP GONE
# with a NotEnoughSharesError explanation.
# NOTE(review): the shouldHTTPError callable args (line 2795) are missing.
2791 def test_GET_URI_URL_missing(self):
2792 base = "/uri/%s" % self._bad_file_uri
2793 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2794 http.GONE, None, "NotEnoughSharesError",
2796 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2797 # here? we must arrange for a download to fail after target.open()
2798 # has been called, and then inspect the response to see that it is
2799 # shorter than we expected.
# PUT dir?t=uri replaces the /foo child with a freshly created (empty)
# directory and echoes the new URI back.
# NOTE(review): line 2804 (presumably `def _made_dir(dn):`) and lines
# 2812-2814 are missing, so `dn` below lacks its visible binding.
2802 def test_PUT_DIRURL_uri(self):
2803 d = self.s.create_dirnode()
2805 new_uri = dn.get_uri()
2806 # replace /foo with a new (empty) directory
2807 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2808 d.addCallback(lambda res:
2809 self.failUnlessReallyEqual(res.strip(), new_uri))
2810 d.addCallback(lambda res:
2811 self.failUnlessRWChildURIIs(self.public_root,
2815 d.addCallback(_made_dir)
# PUT dir?t=uri&replace=false must refuse to overwrite an existing child
# (409 Conflict) and leave the original /foo cap in place.
# NOTE(review): lines 2820, 2826, 2828, 2831-2833 are missing from this
# listing (likely the _made_dir header, self.PUT callable, and new_uri arg).
2818 def test_PUT_DIRURL_uri_noreplace(self):
2819 d = self.s.create_dirnode()
2821 new_uri = dn.get_uri()
2822 # replace /foo with a new (empty) directory, but ask that
2823 # replace=false, so it should fail
2824 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2825 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2827 self.public_url + "/foo?t=uri&replace=false",
2829 d.addCallback(lambda res:
2830 self.failUnlessRWChildURIIs(self.public_root,
2834 d.addCallback(_made_dir)
# PUT to a directory with an unrecognized t= must return 400 Bad Request
# and must not disturb the existing child.
# NOTE(review): lines 2843-2845 (remaining args of failUnlessRWChildURIIs)
# are missing from this listing.
2837 def test_PUT_DIRURL_bad_t(self):
2838 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2839 "400 Bad Request", "PUT to a directory",
2840 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2841 d.addCallback(lambda res:
2842 self.failUnlessRWChildURIIs(self.public_root,
# PUT file?t=uri attaches an already-uploaded cap as a new child and
# echoes the cap back; the child's contents must match.
# NOTE(review): line 2853 (remaining args / `return d`) is missing.
2847 def test_PUT_NEWFILEURL_uri(self):
2848 contents, n, new_uri = self.makefile(8)
2849 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2850 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
2851 d.addCallback(lambda res:
2852 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# Same as test_PUT_NEWFILEURL_uri but targeting an existing child
# (bar.txt), which is replaced by default.
# NOTE(review): line 2862 (remaining args / `return d`) is missing.
2856 def test_PUT_NEWFILEURL_uri_replace(self):
2857 contents, n, new_uri = self.makefile(8)
2858 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2859 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
2860 d.addCallback(lambda res:
2861 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# With replace=false, attaching over an existing child must fail.
# NOTE(review): line 2869 (presumably the "409 Conflict" argument) is
# missing from this listing.
2865 def test_PUT_NEWFILEURL_uri_no_replace(self):
2866 contents, n, new_uri = self.makefile(8)
2867 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2868 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2870 "There was already a child by that name, and you asked me "
2871 "to not replace it")
# An unrecognized (future-format) writecap may not be placed in a write
# slot; the webapi must reject it.
# NOTE(review): line 2878 (presumably the expected HTTP status) is missing.
2874 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
2875 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
2876 d.addBoth(self.shouldFail, error.Error,
2877 "POST_put_uri_unknown_bad",
2879 "unknown cap in a write slot")
# An unknown cap explicitly marked read-only (ro.) IS accepted, landing in
# the child's read-only slot.
2882 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
2883 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
2884 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2885 u"put-future-ro.txt")
# An unknown cap marked immutable (imm.) is likewise accepted read-only.
2888 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
2889 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
2890 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2891 u"put-future-imm.txt")
# PUT /uri uploads unlinked file contents, returns the new cap, and the
# cap must fetch back the same bytes.
# NOTE(review): lines 2897 and 2904 (presumably `def _check(uri):` and
# `def _check2(res):`) are missing from this listing.
2894 def test_PUT_NEWFILE_URI(self):
2895 file_contents = "New file contents here\n"
2896 d = self.PUT("/uri", file_contents)
2898 assert isinstance(uri, str), uri
2899 self.failUnless(uri in FakeCHKFileNode.all_contents)
2900 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
2902 return self.GET("/uri/%s" % uri)
2903 d.addCallback(_check)
2905 self.failUnlessReallyEqual(res, file_contents)
2906 d.addCallback(_check2)
# Same as test_PUT_NEWFILE_URI but with an explicit mutable=false query
# arg; result should still be an immutable CHK upload.
# NOTE(review): lines 2912 and 2919 (`def _check(uri):` / `def _check2`)
# are missing from this listing.
2909 def test_PUT_NEWFILE_URI_not_mutable(self):
2910 file_contents = "New file contents here\n"
2911 d = self.PUT("/uri?mutable=false", file_contents)
2913 assert isinstance(uri, str), uri
2914 self.failUnless(uri in FakeCHKFileNode.all_contents)
2915 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
2917 return self.GET("/uri/%s" % uri)
2918 d.addCallback(_check)
2920 self.failUnlessReallyEqual(res, file_contents)
2921 d.addCallback(_check2)
# PUT /uri with an unsupported t= must be rejected with the list of
# supported operations in the error message.
# NOTE(review): line 2928 (presumably the expected HTTP status) is missing.
2924 def test_PUT_NEWFILE_URI_only_PUT(self):
2925 d = self.PUT("/uri?t=bogus", "")
2926 d.addBoth(self.shouldFail, error.Error,
2927 "PUT_NEWFILE_URI_only_PUT",
2929 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true creates an SSK mutable file; the returned filecap
# must be a writeable SSK URI whose storage index is known to the fake
# node store, and both node download and web GET must return the contents.
# NOTE(review): lines 2944 and 2948 (`def _check2(data):` /
# `def _check3(res):`) are missing from this listing.
2932 def test_PUT_NEWFILE_URI_mutable(self):
2933 file_contents = "New file contents here\n"
2934 d = self.PUT("/uri?mutable=true", file_contents)
2935 def _check1(filecap):
2936 filecap = filecap.strip()
2937 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2938 self.filecap = filecap
2939 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2940 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
2941 n = self.s.create_node_from_uri(filecap)
2942 return n.download_best_version()
2943 d.addCallback(_check1)
2945 self.failUnlessReallyEqual(data, file_contents)
2946 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2947 d.addCallback(_check2)
2949 self.failUnlessReallyEqual(res, file_contents)
2950 d.addCallback(_check3)
# PUT /uri?t=mkdir creates an unlinked directory; the returned dircap must
# resolve to a node with no children and render as empty JSON.
# NOTE(review): lines 2955 and 2960 (`def _check(uri):` / `return d2`) are
# missing from this listing.
2953 def test_PUT_mkdir(self):
2954 d = self.PUT("/uri?t=mkdir", "")
2956 n = self.s.create_node_from_uri(uri.strip())
2957 d2 = self.failUnlessNodeKeysAre(n, [])
2958 d2.addCallback(lambda res:
2959 self.GET("/uri/%s?t=json" % uri))
2961 d.addCallback(_check)
2962 d.addCallback(self.failUnlessIsEmptyJSON)
# POST t=check on a child returns a results page; with fake filenodes the
# result body carries no meaningful health data.
# NOTE(review): lines 2967 (`def _done(res):`) and 2972-2973 are missing.
2965 def test_POST_check(self):
2966 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2968 # this returns a string form of the results, which are probably
2969 # None since we're using fake filenodes.
2970 # TODO: verify that the check actually happened, by changing
2971 # FakeCHKFileNode to count how many times .check() has been
2974 d.addCallback(_done)
# An unsupported HTTP verb on a file URL must produce 501 Not Implemented.
# NOTE(review): the trailing `return d` (line 2983) is not in this listing.
2977 def test_bad_method(self):
2978 url = self.webish_url + self.public_url + "/foo/bar.txt"
2979 d = self.shouldHTTPError("test_bad_method",
2980 501, "Not Implemented",
2981 "I don't know how to treat a BOGUS request.",
2982 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri endpoint (no cap) is not supported: 501.
# NOTE(review): the trailing `return d` (line 2990) is not in this listing.
2985 def test_short_url(self):
2986 url = self.webish_url + "/uri"
2987 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2988 "I don't know how to treat a DELETE request.",
2989 client.getPage, url, method="DELETE")
# Querying status for a nonexistent operation handle must yield 404.
# NOTE(review): the trailing `return d` (line 2997) is not in this listing.
2992 def test_ophandle_bad(self):
2993 url = self.webish_url + "/operations/bogus?t=status"
2994 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2995 "unknown/expired handle 'bogus'",
2996 client.getPage, url)
# Start a long-running manifest operation under ophandle=128, confirm its
# status is queryable, then t=cancel it: the monitor must report cancelled
# and the handle must be forgotten (subsequent status query -> 404).
# NOTE(review): lines 3004, 3009 (`def _check1/_check2(res):`), 3015, and
# 3021 (`self.GET,`) are missing from this listing.
2999 def test_ophandle_cancel(self):
3000 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
3001 followRedirect=True)
3002 d.addCallback(lambda ignored:
3003 self.GET("/operations/128?t=status&output=JSON"))
3005 data = simplejson.loads(res)
3006 self.failUnless("finished" in data, res)
3007 monitor = self.ws.root.child_operations.handles["128"][0]
3008 d = self.POST("/operations/128?t=cancel&output=JSON")
3010 data = simplejson.loads(res)
3011 self.failUnless("finished" in data, res)
3012 # t=cancel causes the handle to be forgotten
3013 self.failUnless(monitor.is_cancelled())
3014 d.addCallback(_check2)
3016 d.addCallback(_check1)
3017 d.addCallback(lambda ignored:
3018 self.shouldHTTPError("test_ophandle_cancel",
3019 404, "404 Not Found",
3020 "unknown/expired handle '128'",
3022 "/operations/128?t=status&output=JSON"))
# retain-for=0 on a status query shortens the handle's lifetime: after
# advancing the test Clock by 2s, the handle must have expired (404).
# NOTE(review): lines 3030 (`def _check1(res):`) and 3041 (`self.GET,`)
# are missing from this listing.
3025 def test_ophandle_retainfor(self):
3026 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
3027 followRedirect=True)
3028 d.addCallback(lambda ignored:
3029 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
3031 data = simplejson.loads(res)
3032 self.failUnless("finished" in data, res)
3033 d.addCallback(_check1)
3034 # the retain-for=0 will cause the handle to be expired very soon
3035 d.addCallback(lambda ign:
3036 self.clock.advance(2.0))
3037 d.addCallback(lambda ignored:
3038 self.shouldHTTPError("test_ophandle_retainfor",
3039 404, "404 Not Found",
3040 "unknown/expired handle '129'",
3042 "/operations/129?t=status&output=JSON"))
# release-after-complete=true on a finished operation's status query
# releases the handle immediately; the next query must 404.
# NOTE(review): line 3056 (`self.GET,`) is missing from this listing.
3045 def test_ophandle_release_after_complete(self):
3046 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
3047 followRedirect=True)
3048 d.addCallback(self.wait_for_operation, "130")
3049 d.addCallback(lambda ignored:
3050 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
3051 # the release-after-complete=true will cause the handle to be expired
3052 d.addCallback(lambda ignored:
3053 self.shouldHTTPError("test_ophandle_release_after_complete",
3054 404, "404 Not Found",
3055 "unknown/expired handle '130'",
3057 "/operations/130?t=status&output=JSON"))
# Uncollected (never-fetched) ophandles expire after 4 days: alive at
# 4 days minus 1 second (handle 131), gone at exactly 4 days (handle 132).
# Uses the injected test Clock rather than real time.
# NOTE(review): lines 3076 (`return d`), 3084 (`def _check1(res):`) and
# 3098 (`self.GET,`) are missing from this listing.
3060 def test_uncollected_ophandle_expiration(self):
3061 # uncollected ophandles should expire after 4 days
3062 def _make_uncollected_ophandle(ophandle):
3063 d = self.POST(self.public_url +
3064 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3065 followRedirect=False)
3066 # When we start the operation, the webapi server will want
3067 # to redirect us to the page for the ophandle, so we get
3068 # confirmation that the operation has started. If the
3069 # manifest operation has finished by the time we get there,
3070 # following that redirect (by setting followRedirect=True
3071 # above) has the side effect of collecting the ophandle that
3072 # we've just created, which means that we can't use the
3073 # ophandle to test the uncollected timeout anymore. So,
3074 # instead, catch the 302 here and don't follow it.
3075 d.addBoth(self.should302, "uncollected_ophandle_creation")
3077 # Create an ophandle, don't collect it, then advance the clock by
3078 # 4 days - 1 second and make sure that the ophandle is still there.
3079 d = _make_uncollected_ophandle(131)
3080 d.addCallback(lambda ign:
3081 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
3082 d.addCallback(lambda ign:
3083 self.GET("/operations/131?t=status&output=JSON"))
3085 data = simplejson.loads(res)
3086 self.failUnless("finished" in data, res)
3087 d.addCallback(_check1)
3088 # Create an ophandle, don't collect it, then try to collect it
3089 # after 4 days. It should be gone.
3090 d.addCallback(lambda ign:
3091 _make_uncollected_ophandle(132))
3092 d.addCallback(lambda ign:
3093 self.clock.advance(96*60*60))
3094 d.addCallback(lambda ign:
3095 self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
3096 404, "404 Not Found",
3097 "unknown/expired handle '132'",
3099 "/operations/132?t=status&output=JSON"))
# Collected (already-fetched) ophandles expire after 1 day: alive at
# 24h minus 1 second (handle 133), gone at 24h (handle 134).
# NOTE(review): lines 3110 (`return d`), 3118 (`def _check1(res):`) and
# 3132 (`self.GET,`) are missing from this listing.
3102 def test_collected_ophandle_expiration(self):
3103 # collected ophandles should expire after 1 day
3104 def _make_collected_ophandle(ophandle):
3105 d = self.POST(self.public_url +
3106 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3107 followRedirect=True)
3108 # By following the initial redirect, we collect the ophandle
3109 # we've just created.
3111 # Create a collected ophandle, then collect it after 23 hours
3112 # and 59 seconds to make sure that it is still there.
3113 d = _make_collected_ophandle(133)
3114 d.addCallback(lambda ign:
3115 self.clock.advance((24*60*60) - 1))
3116 d.addCallback(lambda ign:
3117 self.GET("/operations/133?t=status&output=JSON"))
3119 data = simplejson.loads(res)
3120 self.failUnless("finished" in data, res)
3121 d.addCallback(_check1)
3122 # Create another uncollected ophandle, then try to collect it
3123 # after 24 hours to make sure that it is gone.
3124 d.addCallback(lambda ign:
3125 _make_collected_ophandle(134))
3126 d.addCallback(lambda ign:
3127 self.clock.advance(24*60*60))
3128 d.addCallback(lambda ign:
3129 self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
3130 404, "404 Not Found",
3131 "unknown/expired handle '134'",
3133 "/operations/134?t=status&output=JSON"))
# POST /report_incident should acknowledge the report in its response.
# NOTE(review): line 3138 (`def _done(res):`) is missing from this listing.
3136 def test_incident(self):
3137 d = self.POST("/report_incident", details="eek")
3139 self.failUnless("Thank you for your report!" in res, res)
3140 d.addCallback(_done)
# Files under the configured staticdir are served beneath /static/.
# NOTE(review): lines 3147-3149 (presumably `f.write("hello")` and
# `f.close()`) and 3151 (`def _check(res):`) are missing from this listing;
# the open() as written would leak without the missing close().
3143 def test_static(self):
3144 webdir = os.path.join(self.staticdir, "subdir")
3145 fileutil.make_dirs(webdir)
3146 f = open(os.path.join(webdir, "hello.txt"), "wb")
3150 d = self.GET("/static/subdir/hello.txt")
3152 self.failUnlessReallyEqual(res, "hello")
3153 d.addCallback(_check)
# Unit tests for the helper functions in allmydata.web.common and
# allmydata.web.status: argument parsing, human-readable abbreviations of
# time/rate/size, rate computation, and pluralization.
# NOTE(review): several interior lines are missing from this listing
# (e.g. 3166, 3189-3190, 3208, 3213 — expected values, a `def` header for
# the 10MBps rate check, and the `convert`/`convert2` helper headers).
3157 class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
3158 def test_load_file(self):
3159 # This will raise an exception unless a well-formed XML file is found under that name.
3160 common.getxmlfile('directory.xhtml').load()
3162 def test_parse_replace_arg(self):
3163 self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
3164 self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
3165 self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
# invalid values (e.g. the misspelled "only_fles") must be rejected
3167 self.shouldFail(AssertionError, "test_parse_replace_arg", "",
3168 common.parse_replace_arg, "only_fles")
3170 def test_abbreviate_time(self):
3171 self.failUnlessReallyEqual(common.abbreviate_time(None), "")
3172 self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
3173 self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
3174 self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
3175 self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
3176 self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
3178 def test_compute_rate(self):
# None/zero elapsed time or size yields None; negative inputs assert
3179 self.failUnlessReallyEqual(common.compute_rate(None, None), None)
3180 self.failUnlessReallyEqual(common.compute_rate(None, 1), None)
3181 self.failUnlessReallyEqual(common.compute_rate(250000, None), None)
3182 self.failUnlessReallyEqual(common.compute_rate(250000, 0), None)
3183 self.failUnlessReallyEqual(common.compute_rate(250000, 10), 25000.0)
3184 self.failUnlessReallyEqual(common.compute_rate(0, 10), 0.0)
3185 self.shouldFail(AssertionError, "test_compute_rate", "",
3186 common.compute_rate, -100, 10)
3187 self.shouldFail(AssertionError, "test_compute_rate", "",
3188 common.compute_rate, 100, -10)
# 10MB in 1 second should render as 10.00MBps
3191 rate = common.compute_rate(10*1000*1000, 1)
3192 self.failUnlessReallyEqual(common.abbreviate_rate(rate), "10.00MBps")
3194 def test_abbreviate_rate(self):
3195 self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
3196 self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
3197 self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
3198 self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
3200 def test_abbreviate_size(self):
3201 self.failUnlessReallyEqual(common.abbreviate_size(None), "")
3202 self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
3203 self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
3204 self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
3205 self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")
3207 def test_plural(self):
# convert(s): format an integer count; convert2(s): format a share list
3209 return "%d second%s" % (s, status.plural(s))
3210 self.failUnlessReallyEqual(convert(0), "0 seconds")
3211 self.failUnlessReallyEqual(convert(1), "1 second")
3212 self.failUnlessReallyEqual(convert(2), "2 seconds")
3214 return "has share%s: %s" % (status.plural(s), ",".join(s))
3215 self.failUnlessReallyEqual(convert2([]), "has shares: ")
3216 self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
3217 self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
3220 class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
def CHECK(self, ign, which, args, clientnum=0):
    """POST to the stashed fileurl for `which`, appending query `args`.

    `ign` absorbs the previous Deferred result so this can be used
    directly as a callback.
    """
    target = "%s?%s" % (self.fileurls[which], args)
    return self.GET(target, method="POST", clientnum=clientnum)
# End-to-end t=check over a real no-network grid: uploads a healthy file
# ("good"), one with a deleted share ("sick"), one with 9 of 10 shares
# deleted ("dead"), a deliberately corrupted mutable file ("corrupt"), a
# literal file ("small") and a literal dirnode ("smalldir"), then checks
# each via the webapi in both HTML and JSON output forms.
# NOTE(review): numbered listing with gaps (e.g. 3229-3232 grid/DATA
# setup, 3253/3255 `self.fileurls = {}`, 3272 the corrupt_share call,
# 3285/3306 closing asserts) — missing lines hedge every claim below.
3227 def test_filecheck(self):
3228 self.basedir = "web/Grid/filecheck"
3230 c0 = self.g.clients[0]
3233 d = c0.upload(upload.Data(DATA, convergence=""))
3234 def _stash_uri(ur, which):
3235 self.uris[which] = ur.uri
3236 d.addCallback(_stash_uri, "good")
3237 d.addCallback(lambda ign:
3238 c0.upload(upload.Data(DATA+"1", convergence="")))
3239 d.addCallback(_stash_uri, "sick")
3240 d.addCallback(lambda ign:
3241 c0.upload(upload.Data(DATA+"2", convergence="")))
3242 d.addCallback(_stash_uri, "dead")
3243 def _stash_mutable_uri(n, which):
3244 self.uris[which] = n.get_uri()
3245 assert isinstance(self.uris[which], str)
3246 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3247 d.addCallback(_stash_mutable_uri, "corrupt")
3248 d.addCallback(lambda ign:
3249 c0.upload(upload.Data("literal", convergence="")))
3250 d.addCallback(_stash_uri, "small")
3251 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3252 d.addCallback(_stash_mutable_uri, "smalldir")
# build webapi URLs ("uri/<quoted cap>") for every stashed cap
3254 def _compute_fileurls(ignored):
3256 for which in self.uris:
3257 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3258 d.addCallback(_compute_fileurls)
# damage the test files: delete 1 share of "sick", 9 of "dead", and
# corrupt one share of "corrupt" via the debug corrupt-share tool
3260 def _clobber_shares(ignored):
3261 good_shares = self.find_uri_shares(self.uris["good"])
3262 self.failUnlessReallyEqual(len(good_shares), 10)
3263 sick_shares = self.find_uri_shares(self.uris["sick"])
3264 os.unlink(sick_shares[0][2])
3265 dead_shares = self.find_uri_shares(self.uris["dead"])
3266 for i in range(1, 10):
3267 os.unlink(dead_shares[i][2])
3268 c_shares = self.find_uri_shares(self.uris["corrupt"])
3269 cso = CorruptShareOptions()
3270 cso.stdout = StringIO()
3271 cso.parseOptions([c_shares[0][2]])
3273 d.addCallback(_clobber_shares)
3275 d.addCallback(self.CHECK, "good", "t=check")
3276 def _got_html_good(res):
3277 self.failUnless("Healthy" in res, res)
3278 self.failIf("Not Healthy" in res, res)
3279 d.addCallback(_got_html_good)
3280 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
3281 def _got_html_good_return_to(res):
3282 self.failUnless("Healthy" in res, res)
3283 self.failIf("Not Healthy" in res, res)
3284 self.failUnless('<a href="somewhere">Return to file'
3286 d.addCallback(_got_html_good_return_to)
3287 d.addCallback(self.CHECK, "good", "t=check&output=json")
3288 def _got_json_good(res):
3289 r = simplejson.loads(res)
3290 self.failUnlessEqual(r["summary"], "Healthy")
3291 self.failUnless(r["results"]["healthy"])
3292 self.failIf(r["results"]["needs-rebalancing"])
3293 self.failUnless(r["results"]["recoverable"])
3294 d.addCallback(_got_json_good)
3296 d.addCallback(self.CHECK, "small", "t=check")
3297 def _got_html_small(res):
3298 self.failUnless("Literal files are always healthy" in res, res)
3299 self.failIf("Not Healthy" in res, res)
3300 d.addCallback(_got_html_small)
3301 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
3302 def _got_html_small_return_to(res):
3303 self.failUnless("Literal files are always healthy" in res, res)
3304 self.failIf("Not Healthy" in res, res)
3305 self.failUnless('<a href="somewhere">Return to file'
3307 d.addCallback(_got_html_small_return_to)
3308 d.addCallback(self.CHECK, "small", "t=check&output=json")
3309 def _got_json_small(res):
3310 r = simplejson.loads(res)
3311 self.failUnlessEqual(r["storage-index"], "")
3312 self.failUnless(r["results"]["healthy"])
3313 d.addCallback(_got_json_small)
3315 d.addCallback(self.CHECK, "smalldir", "t=check")
3316 def _got_html_smalldir(res):
3317 self.failUnless("Literal files are always healthy" in res, res)
3318 self.failIf("Not Healthy" in res, res)
3319 d.addCallback(_got_html_smalldir)
3320 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
3321 def _got_json_smalldir(res):
3322 r = simplejson.loads(res)
3323 self.failUnlessEqual(r["storage-index"], "")
3324 self.failUnless(r["results"]["healthy"])
3325 d.addCallback(_got_json_smalldir)
3327 d.addCallback(self.CHECK, "sick", "t=check")
3328 def _got_html_sick(res):
3329 self.failUnless("Not Healthy" in res, res)
3330 d.addCallback(_got_html_sick)
3331 d.addCallback(self.CHECK, "sick", "t=check&output=json")
3332 def _got_json_sick(res):
3333 r = simplejson.loads(res)
3334 self.failUnlessEqual(r["summary"],
3335 "Not Healthy: 9 shares (enc 3-of-10)")
3336 self.failIf(r["results"]["healthy"])
3337 self.failIf(r["results"]["needs-rebalancing"])
3338 self.failUnless(r["results"]["recoverable"])
3339 d.addCallback(_got_json_sick)
3341 d.addCallback(self.CHECK, "dead", "t=check")
3342 def _got_html_dead(res):
3343 self.failUnless("Not Healthy" in res, res)
3344 d.addCallback(_got_html_dead)
3345 d.addCallback(self.CHECK, "dead", "t=check&output=json")
3346 def _got_json_dead(res):
3347 r = simplejson.loads(res)
3348 self.failUnlessEqual(r["summary"],
3349 "Not Healthy: 1 shares (enc 3-of-10)")
3350 self.failIf(r["results"]["healthy"])
3351 self.failIf(r["results"]["needs-rebalancing"])
3352 self.failIf(r["results"]["recoverable"])
3353 d.addCallback(_got_json_dead)
# verify=true is required to detect the corrupted share
3355 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
3356 def _got_html_corrupt(res):
3357 self.failUnless("Not Healthy! : Unhealthy" in res, res)
3358 d.addCallback(_got_html_corrupt)
3359 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
3360 def _got_json_corrupt(res):
3361 r = simplejson.loads(res)
3362 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
3364 self.failIf(r["results"]["healthy"])
3365 self.failUnless(r["results"]["recoverable"])
3366 self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
3367 self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
3368 d.addCallback(_got_json_corrupt)
3370 d.addErrback(self.explain_web_error)
# t=check&repair=true (HTML output): a healthy file needs no repair, a
# file with one deleted share is repaired successfully, and a corrupted
# mutable file is repaired when verify=true is also given. Repair of the
# fully-dead file is deliberately left commented out pending a decision
# on how that failure should be reported.
# NOTE(review): numbered listing with gaps (grid/DATA setup lines
# 3375-3378, `self.fileurls = {}` around 3396, the corrupt_share call at
# 3413, and `return d` lines are missing).
3373 def test_repair_html(self):
3374 self.basedir = "web/Grid/repair_html"
3376 c0 = self.g.clients[0]
3379 d = c0.upload(upload.Data(DATA, convergence=""))
3380 def _stash_uri(ur, which):
3381 self.uris[which] = ur.uri
3382 d.addCallback(_stash_uri, "good")
3383 d.addCallback(lambda ign:
3384 c0.upload(upload.Data(DATA+"1", convergence="")))
3385 d.addCallback(_stash_uri, "sick")
3386 d.addCallback(lambda ign:
3387 c0.upload(upload.Data(DATA+"2", convergence="")))
3388 d.addCallback(_stash_uri, "dead")
3389 def _stash_mutable_uri(n, which):
3390 self.uris[which] = n.get_uri()
3391 assert isinstance(self.uris[which], str)
3392 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3393 d.addCallback(_stash_mutable_uri, "corrupt")
3395 def _compute_fileurls(ignored):
3397 for which in self.uris:
3398 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3399 d.addCallback(_compute_fileurls)
# damage the files exactly as in test_filecheck above
3401 def _clobber_shares(ignored):
3402 good_shares = self.find_uri_shares(self.uris["good"])
3403 self.failUnlessReallyEqual(len(good_shares), 10)
3404 sick_shares = self.find_uri_shares(self.uris["sick"])
3405 os.unlink(sick_shares[0][2])
3406 dead_shares = self.find_uri_shares(self.uris["dead"])
3407 for i in range(1, 10):
3408 os.unlink(dead_shares[i][2])
3409 c_shares = self.find_uri_shares(self.uris["corrupt"])
3410 cso = CorruptShareOptions()
3411 cso.stdout = StringIO()
3412 cso.parseOptions([c_shares[0][2]])
3414 d.addCallback(_clobber_shares)
3416 d.addCallback(self.CHECK, "good", "t=check&repair=true")
3417 def _got_html_good(res):
3418 self.failUnless("Healthy" in res, res)
3419 self.failIf("Not Healthy" in res, res)
3420 self.failUnless("No repair necessary" in res, res)
3421 d.addCallback(_got_html_good)
3423 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
3424 def _got_html_sick(res):
3425 self.failUnless("Healthy : healthy" in res, res)
3426 self.failIf("Not Healthy" in res, res)
3427 self.failUnless("Repair successful" in res, res)
3428 d.addCallback(_got_html_sick)
3430 # repair of a dead file will fail, of course, but it isn't yet
3431 # clear how this should be reported. Right now it shows up as
3434 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
3435 #def _got_html_dead(res):
3437 # self.failUnless("Healthy : healthy" in res, res)
3438 # self.failIf("Not Healthy" in res, res)
3439 # self.failUnless("No repair necessary" in res, res)
3440 #d.addCallback(_got_html_dead)
3442 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
3443 def _got_html_corrupt(res):
3444 self.failUnless("Healthy : Healthy" in res, res)
3445 self.failIf("Not Healthy" in res, res)
3446 self.failUnless("Repair successful" in res, res)
3447 d.addCallback(_got_html_corrupt)
3449 d.addErrback(self.explain_web_error)
# t=check&repair=true with output=json: a file missing one share reports
# repair-attempted/repair-successful true, an unhealthy pre-repair
# summary, and a healthy post-repair summary.
# NOTE(review): setup lines 3454-3457 and `self.fileurls = {}` around
# 3464, plus `return d`, are missing from this listing.
3452 def test_repair_json(self):
3453 self.basedir = "web/Grid/repair_json"
3455 c0 = self.g.clients[0]
3458 d = c0.upload(upload.Data(DATA+"1", convergence=""))
3459 def _stash_uri(ur, which):
3460 self.uris[which] = ur.uri
3461 d.addCallback(_stash_uri, "sick")
3463 def _compute_fileurls(ignored):
3465 for which in self.uris:
3466 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3467 d.addCallback(_compute_fileurls)
# delete one share so the file needs (and can get) repair
3469 def _clobber_shares(ignored):
3470 sick_shares = self.find_uri_shares(self.uris["sick"])
3471 os.unlink(sick_shares[0][2])
3472 d.addCallback(_clobber_shares)
3474 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
3475 def _got_json_sick(res):
3476 r = simplejson.loads(res)
3477 self.failUnlessReallyEqual(r["repair-attempted"], True)
3478 self.failUnlessReallyEqual(r["repair-successful"], True)
3479 self.failUnlessEqual(r["pre-repair-results"]["summary"],
3480 "Not Healthy: 9 shares (enc 3-of-10)")
3481 self.failIf(r["pre-repair-results"]["results"]["healthy"])
3482 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
3483 self.failUnless(r["post-repair-results"]["results"]["healthy"])
3484 d.addCallback(_got_json_sick)
3486 d.addErrback(self.explain_web_error)
3489 def test_unknown(self, immutable=False):
3490 self.basedir = "web/Grid/unknown"
3492 self.basedir = "web/Grid/unknown-immutable"
3495 c0 = self.g.clients[0]
3499 # the future cap format may contain slashes, which must be tolerated
3500 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
3504 name = u"future-imm"
3505 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
3506 d = c0.create_immutable_dirnode({name: (future_node, {})})
3509 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3510 d = c0.create_dirnode()
3512 def _stash_root_and_create_file(n):
3514 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
3515 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
3517 return self.rootnode.set_node(name, future_node)
3518 d.addCallback(_stash_root_and_create_file)
3520 # make sure directory listing tolerates unknown nodes
3521 d.addCallback(lambda ign: self.GET(self.rooturl))
3522 def _check_directory_html(res, expected_type_suffix):
3523 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
3524 '<td>%s</td>' % (expected_type_suffix, str(name)),
3526 self.failUnless(re.search(pattern, res), res)
3527 # find the More Info link for name, should be relative
3528 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3529 info_url = mo.group(1)
3530 self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
3532 d.addCallback(_check_directory_html, "-IMM")
3534 d.addCallback(_check_directory_html, "")
3536 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3537 def _check_directory_json(res, expect_rw_uri):
3538 data = simplejson.loads(res)
3539 self.failUnlessEqual(data[0], "dirnode")
3540 f = data[1]["children"][name]
3541 self.failUnlessEqual(f[0], "unknown")
3543 self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
3545 self.failIfIn("rw_uri", f[1])
3547 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
3549 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
3550 self.failUnless("metadata" in f[1])
3551 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
3553 def _check_info(res, expect_rw_uri, expect_ro_uri):
3554 self.failUnlessIn("Object Type: <span>unknown</span>", res)
3556 self.failUnlessIn(unknown_rwcap, res)
3559 self.failUnlessIn(unknown_immcap, res)
3561 self.failUnlessIn(unknown_rocap, res)
3563 self.failIfIn(unknown_rocap, res)
3564 self.failIfIn("Raw data as", res)
3565 self.failIfIn("Directory writecap", res)
3566 self.failIfIn("Checker Operations", res)
3567 self.failIfIn("Mutable File Operations", res)
3568 self.failIfIn("Directory Operations", res)
3570 # FIXME: these should have expect_rw_uri=not immutable; I don't know
3571 # why they fail. Possibly related to ticket #922.
3573 d.addCallback(lambda ign: self.GET(expected_info_url))
3574 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
3575 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
3576 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
3578 def _check_json(res, expect_rw_uri):
3579 data = simplejson.loads(res)
3580 self.failUnlessEqual(data[0], "unknown")
3582 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
3584 self.failIfIn("rw_uri", data[1])
3587 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
3588 self.failUnlessReallyEqual(data[1]["mutable"], False)
3590 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
3591 self.failUnlessReallyEqual(data[1]["mutable"], True)
3593 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
3594 self.failIf("mutable" in data[1], data[1])
3596 # TODO: check metadata contents
3597 self.failUnless("metadata" in data[1])
3599 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
3600 d.addCallback(_check_json, expect_rw_uri=not immutable)
3602 # and make sure that a read-only version of the directory can be
3603 # rendered too. This version will not have unknown_rwcap, whether
3604 # or not future_node was immutable.
3605 d.addCallback(lambda ign: self.GET(self.rourl))
3607 d.addCallback(_check_directory_html, "-IMM")
3609 d.addCallback(_check_directory_html, "-RO")
3611 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
3612 d.addCallback(_check_directory_json, expect_rw_uri=False)
3614 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
3615 d.addCallback(_check_json, expect_rw_uri=False)
3617 # TODO: check that getting t=info from the Info link in the ro directory
3618 # works, and does not include the writecap URI.
3621 def test_immutable_unknown(self):
3622 return self.test_unknown(immutable=True)
    def test_mutant_dirnodes_are_omitted(self):
        """An immutable directory silently drops 'mutant' children (entries
        whose caps are inconsistent with an immutable container), so only the
        valid LIT child remains visible in list(), HTML, and JSON views."""
        self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
        c = self.g.clients[0]
        # NOTE(review): source appears truncated here; `nm` used below is
        # presumably c.nodemaker — confirm against upstream.
        lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
        mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
        mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
        # This method tests mainly dirnode, but we'd have to duplicate code in order to
        # test the dirnode and web layers separately.
        # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
        # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
        # When the directory is read, the mutants should be silently disposed of, leaving
        # their lonely sibling.
        # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
        # because immutable directories don't have a writecap and therefore that field
        # isn't (and can't be) decrypted.
        # TODO: The field still exists in the netstring. Technically we should check what
        # happens if something is put there (_unpack_contents should raise ValueError),
        # but that can wait.
        lonely_child = nm.create_from_cap(lonely_uri)
        mutant_ro_child = nm.create_from_cap(mut_read_uri)
        mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
        # NOTE(review): body of _by_hook_or_by_crook is missing in this copy;
        # it presumably just returns True so the mutants pass the
        # is_allowed_in_immutable_directory check — confirm against upstream.
        def _by_hook_or_by_crook():
        for n in [mutant_ro_child, mutant_write_in_ro_child]:
            n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
        # Make the write-in-ro child expose a writecap in its readonly slot.
        mutant_write_in_ro_child.get_write_uri = lambda: None
        mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
        kids = {u"lonely": (lonely_child, {}),
                u"ro": (mutant_ro_child, {}),
                u"write-in-ro": (mutant_write_in_ro_child, {}),
        d = c.create_immutable_dirnode(kids)
        # NOTE(review): the `def _created(dn):` header for the following
        # callback body appears to be missing from this copy.
        self.failUnless(isinstance(dn, dirnode.DirectoryNode))
        self.failIf(dn.is_mutable())
        self.failUnless(dn.is_readonly())
        # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
        self.failIf(hasattr(dn._node, 'get_writekey'))
        self.failUnless("RO-IMM" in rep)
        self.failUnlessIn("CHK", cap.to_string())
        self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
        return download_to_data(dn._node)
        d.addCallback(_created)
        def _check_data(data):
            # Decode the netstring representation of the directory to check that all children
            # are present. This is a bit of an abstraction violation, but there's not really
            # any other way to do it given that the real DirectoryNode._unpack_contents would
            # strip the mutant children out (which is what we're trying to test, later).
            while position < len(data):
                entries, position = split_netstring(data, 1, position)
                (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
                name = name_utf8.decode("utf-8")
                self.failUnless(rwcapdata == "")
                self.failUnless(name in kids)
                (expected_child, ign) = kids[name]
                self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
            self.failUnlessReallyEqual(numkids, 3)
            return self.rootnode.list()
        d.addCallback(_check_data)
        # Now when we use the real directory listing code, the mutants should be absent.
        def _check_kids(children):
            self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
            lonely_node, lonely_metadata = children[u"lonely"]
            # The LIT child is immutable, so it must not advertise a writecap.
            self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
            self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
        d.addCallback(_check_kids)
        d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
        d.addCallback(lambda n: n.list())
        d.addCallback(_check_kids) # again with dirnode recreated from cap
        # Make sure the lonely child can be listed in HTML...
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_html(res):
            # no mutant caps should leak into the rendered page
            self.failIfIn("URI:SSK", res)
            get_lonely = "".join([r'<td>FILE</td>',
                                  r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
                                  r'\s+<td>%d</td>' % len("one"),
            self.failUnless(re.search(get_lonely, res), res)
            # find the More Info link for name, should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
        d.addCallback(_check_html)
        # ...and in the JSON directory listing.
        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            listed_children = data[1]["children"]
            self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
            ll_type, ll_data = listed_children[u"lonely"]
            self.failUnlessEqual(ll_type, "filenode")
            self.failIf("rw_uri" in ll_data)
            self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
        d.addCallback(_check_json)
    def test_deep_check(self):
        """Exercise t=stream-deep-check and t=stream-manifest over a small
        tree (good CHK file, LIT file, damaged 'sick' file, and an unknown
        future cap), then make a subdirectory unrecoverable and verify that
        the streaming APIs emit an ERROR: line instead of JSON."""
        self.basedir = "web/Grid/deep_check"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        # this tests that deep-check and stream-manifest will ignore
        # UnknownNode instances. Hopefully this will also cover deep-stats.
        future_node = UnknownNode(unknown_rwcap, unknown_rocap)
        d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
        def _clobber_shares(ignored):
            # damage (but don't kill) the 'sick' file: drop 2 of its shares
            self.delete_shares_numbered(self.uris["sick"], [0,1])
        d.addCallback(_clobber_shares)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        # NOTE(review): the `def _done(res):` header for the following
        # callback body appears to be missing from this copy.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        print "response is:", res
        print "undecodeable line was '%s'" % line
        # root, good, small, sick, future + stats
        self.failUnlessReallyEqual(len(units), 5+1)
        # should be parent-first
        self.failUnlessEqual(u0["path"], [])
        self.failUnlessEqual(u0["type"], "directory")
        self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
        u0cr = u0["check-results"]
        self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
        ugood = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"good"]][0]
        self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
        ugoodcr = ugood["check-results"]
        self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
        # the final unit carries the aggregate deep-stats
        self.failUnlessEqual(stats["type"], "stats")
        self.failUnlessReallyEqual(s["count-immutable-files"], 2)
        self.failUnlessReallyEqual(s["count-literal-files"], 1)
        self.failUnlessReallyEqual(s["count-directories"], 1)
        self.failUnlessReallyEqual(s["count-unknown"], 1)
        d.addCallback(_done)
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_manifest(res):
            self.failUnless(res.endswith("\n"))
            units = [simplejson.loads(t) for t in res[:-1].split("\n")]
            self.failUnlessReallyEqual(len(units), 5+1)
            self.failUnlessEqual(units[-1]["type"], "stats")
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
            self.failUnlessReallyEqual(stats["count-literal-files"], 1)
            self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
            self.failUnlessReallyEqual(stats["count-unknown"], 1)
        d.addCallback(_check_manifest)
        # now add root/subdir and root/subdir/grandchild, then make subdir
        # unrecoverable, then see what happens
        d.addCallback(lambda ign:
                      self.rootnode.create_subdirectory(u"subdir"))
        d.addCallback(_stash_uri, "subdir")
        d.addCallback(lambda subdir_node:
                      subdir_node.add_file(u"grandchild",
                                           upload.Data(DATA+"2",
        d.addCallback(_stash_uri, "grandchild")
        d.addCallback(lambda ign:
                      self.delete_shares_numbered(self.uris["subdir"],
        # root/subdir [unrecoverable]
        # root/subdir/grandchild
        # how should a streaming-JSON API indicate fatal error?
        # answer: emit ERROR: instead of a JSON string
        d.addCallback(self.CHECK, "root", "t=stream-manifest")
        def _check_broken_manifest(res):
            lines = res.splitlines()
            for (i,line) in enumerate(lines)
            if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            # everything after the ERROR: line should be a traceback
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessReallyEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
        d.addCallback(_check_broken_manifest)
        d.addCallback(self.CHECK, "root", "t=stream-deep-check")
        def _check_broken_deepcheck(res):
            lines = res.splitlines()
            for (i,line) in enumerate(lines)
            if line.startswith("ERROR:")]
                self.fail("no ERROR: in output: %s" % (res,))
            first_error = error_lines[0]
            error_line = lines[first_error]
            error_msg = lines[first_error+1:]
            error_msg_s = "\n".join(error_msg) + "\n"
            self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
            self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
            units = [simplejson.loads(line) for line in lines[:first_error]]
            self.failUnlessReallyEqual(len(units), 6) # includes subdir
            last_unit = units[-1]
            self.failUnlessEqual(last_unit["path"], ["subdir"])
            r = last_unit["check-results"]["results"]
            self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
            self.failUnlessReallyEqual(r["count-shares-good"], 1)
            self.failUnlessReallyEqual(r["recoverable"], False)
        d.addCallback(_check_broken_deepcheck)
        d.addErrback(self.explain_web_error)
    def test_deep_check_and_repair(self):
        """Exercise t=stream-deep-check&repair=true: a healthy file is left
        alone (repair-attempted=False) while a 'sick' file with one deleted
        share is repaired back to 10 good shares."""
        self.basedir = "web/Grid/deep_check_and_repair"
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"good", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"sick",
                                             upload.Data(DATA+"1",
        d.addCallback(_stash_uri, "sick")
        #d.addCallback(lambda ign:
        #              self.rootnode.add_file(u"dead",
        #                                     upload.Data(DATA+"2",
        #d.addCallback(_stash_uri, "dead")
        #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
        #d.addCallback(_stash_uri, "corrupt")
        def _clobber_shares(ignored):
            # sanity-check the healthy file, then delete one share of 'sick'
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            #dead_shares = self.find_uri_shares(self.uris["dead"])
            #for i in range(1, 10):
            #    os.unlink(dead_shares[i][2])
            #c_shares = self.find_uri_shares(self.uris["corrupt"])
            #cso = CorruptShareOptions()
            #cso.stdout = StringIO()
            #cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)
        # root/good CHK, 10 shares
        # root/sick CHK, 9 shares
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
        # NOTE(review): the `def _done(res):` header for the following
        # callback body appears to be missing from this copy.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        # root, good, small, sick + stats
        self.failUnlessReallyEqual(len(units), 4+1)
        # should be parent-first
        self.failUnlessEqual(u0["path"], [])
        self.failUnlessEqual(u0["type"], "directory")
        self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
        u0crr = u0["check-and-repair-results"]
        self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
        self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
        ugood = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"good"]][0]
        self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
        ugoodcrr = ugood["check-and-repair-results"]
        self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
        self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
        usick = [u for u in units
                 if u["type"] == "file" and u["path"] == [u"sick"]][0]
        self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
        usickcrr = usick["check-and-repair-results"]
        self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
        self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
        self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
        self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
        # the final unit carries the aggregate deep-stats
        self.failUnlessEqual(stats["type"], "stats")
        self.failUnlessReallyEqual(s["count-immutable-files"], 2)
        self.failUnlessReallyEqual(s["count-literal-files"], 1)
        self.failUnlessReallyEqual(s["count-directories"], 1)
        d.addCallback(_done)
        d.addErrback(self.explain_web_error)
    def _count_leases(self, ignored, which):
        # Collect (share-filename, lease-count) pairs for every share of the
        # stashed URI named `which`. `ignored` is the previous Deferred result
        # so this can be used directly as an addCallback target.
        # NOTE(review): source appears truncated — the `lease_counts = []`
        # initialization and the final `return lease_counts` are not visible
        # in this copy; confirm against upstream.
        u = self.uris[which]
        shares = self.find_uri_shares(u)
        for shnum, serverid, fn in shares:
            sf = get_share_file(fn)
            num_leases = len(list(sf.get_leases()))
            lease_counts.append( (fn, num_leases) )
4032 def _assert_leasecount(self, lease_counts, expected):
4033 for (fn, num_leases) in lease_counts:
4034 if num_leases != expected:
4035 self.fail("expected %d leases, have %d, on %s" %
4036 (expected, num_leases, fn))
    def test_add_lease(self):
        """t=check without add-lease must not add leases; t=check&add-lease=true
        from the same client merely renews the existing lease, while the same
        request from a second client (different lease secrets) adds one."""
        self.basedir = "web/Grid/add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "two")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
        d.addCallback(_stash_mutable_uri, "mutable")
        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        # baseline: one lease on each object
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self.CHECK, "one", "t=check") # no add-lease
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        # a plain check must not change any lease counts
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # this CHECK uses the original client, which uses the same
        # lease-secrets, so it will just renew the original lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # this CHECK uses an alternate client, which adds a second lease
        d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # same pattern for the mutable file: same client renews...
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # ...while the alternate client adds a lease.
        # NOTE(review): the continuation line (presumably `clientnum=1`) of
        # this call appears to be missing from this copy.
        d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
        d.addCallback(_got_html_good)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "two")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)
        d.addErrback(self.explain_web_error)
    def test_deep_add_lease(self):
        """t=stream-deep-check&add-lease=true from the original client renews
        existing leases (counts stay at 1); from a second client it adds a
        lease to every node in the tree (counts go to 2)."""
        self.basedir = "web/Grid/deep_add_lease"
        self.set_up_grid(num_clients=2)
        c0 = self.g.clients[0]
        d = c0.create_dirnode()
        def _stash_root_and_create_file(n):
            self.uris["root"] = n.get_uri()
            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
            return n.add_file(u"one", upload.Data(DATA, convergence=""))
        d.addCallback(_stash_root_and_create_file)
        def _stash_uri(fn, which):
            self.uris[which] = fn.get_uri()
        d.addCallback(_stash_uri, "one")
        d.addCallback(lambda ign:
                      self.rootnode.add_file(u"small",
                                             upload.Data("literal",
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
        d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
        d.addCallback(_stash_uri, "mutable")
        d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
        # NOTE(review): the `def _done(res):` header for the following
        # callback body appears to be missing from this copy.
        units = [simplejson.loads(line)
                 for line in res.splitlines()
        # root, one, small, mutable, stats
        self.failUnlessReallyEqual(len(units), 4+1)
        d.addCallback(_done)
        # plain deep-check must not change lease counts
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # same client: add-lease just renews, counts stay at 1
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
        d.addCallback(_done)
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 1)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 1)
        # alternate client: a second lease appears on every node
        # NOTE(review): the continuation line (presumably `clientnum=1`) of
        # this call appears to be missing from this copy.
        d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
        d.addCallback(_done)
        d.addCallback(self._count_leases, "root")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "one")
        d.addCallback(self._assert_leasecount, 2)
        d.addCallback(self._count_leases, "mutable")
        d.addCallback(self._assert_leasecount, 2)
        d.addErrback(self.explain_web_error)
    def test_exceptions(self):
        """Verify how the webapi reports failures: 410 Gone with a plain-text
        explanation for NoSharesError / NotEnoughSharesError /
        UnrecoverableFileError, 404 for missing children, readable HTML for
        unrecoverable directories, and Accept-header-sensitive (text/plain vs
        HTML) tracebacks for unexpected 500 errors."""
        self.basedir = "web/Grid/exceptions"
        self.set_up_grid(num_clients=1, num_servers=2)
        c0 = self.g.clients[0]
        c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
        d = c0.create_dirnode()
        # NOTE(review): the `def _stash_root(n):` header for the following
        # callback body appears to be missing from this copy.
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
        d.addCallback(_stash_root)
        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
        # NOTE(review): the `def _stash_bad(ur):` header for the following
        # callback body appears to be missing from this copy.
        # keep only share 0 so the file is readable-but-unrecoverable
        self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
        self.delete_shares_numbered(ur.uri, range(1,10))
        # flip one bit of the readkey to fabricate a cap with zero shares
        u = uri.from_string(ur.uri)
        u.key = testutil.flip_bit(u.key, 0)
        baduri = u.to_string()
        self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
        d.addCallback(_stash_bad)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_1share(n):
            # NOTE(review): the assignment of `u` (presumably n.get_uri()) is
            # missing from this copy.
            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-1share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(1,10))
        d.addCallback(_mangle_dirnode_1share)
        d.addCallback(lambda ign: c0.create_dirnode())
        def _mangle_dirnode_0share(n):
            # NOTE(review): the assignment of `u` (presumably n.get_uri()) is
            # missing from this copy.
            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
            self.fileurls["dir-0share-json"] = url + "?t=json"
            self.delete_shares_numbered(u, range(0,10))
        d.addCallback(_mangle_dirnode_0share)
        # NotEnoughSharesError should be reported sensibly, with a
        # text/plain explanation of the problem, and perhaps some
        # information on which shares *could* be found.
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET unrecoverable",
                                           410, "Gone", "NoSharesError",
                                           self.GET, self.fileurls["0shares"]))
        def _check_zero_shares(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("NoSharesError: no shares could be found. "
                   "Zero shares usually indicates a corrupt URI, or that "
                   "no servers were connected, but it might also indicate "
                   "severe corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is: "
                   "no shares (need 3). Last failure: None")
            self.failUnlessReallyEqual(exp, body)
        d.addCallback(_check_zero_shares)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET 1share",
                                           410, "Gone", "NotEnoughSharesError",
                                           self.GET, self.fileurls["1share"]))
        def _check_one_share(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            msgbase = ("NotEnoughSharesError: This indicates that some "
                       "servers were unavailable, or that shares have been "
                       "lost to server departure, hard drive failure, or disk "
                       "corruption. You should perform a filecheck on "
                       "this object to learn more. The full error message is:"
            # the exact share-status suffix depends on download timing, so
            # accept either form
            msg1 = msgbase + (" ran out of shares:"
                              " overdue= unused= need 3. Last failure: None")
            msg2 = msgbase + (" ran out of shares:"
                              " pending=Share(sh0-on-xgru5)"
                              " overdue= unused= need 3. Last failure: None")
            self.failUnless(body == msg1 or body == msg2, body)
        d.addCallback(_check_one_share)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        def _missing_child(body):
            self.failUnless("No such child: imaginary" in body, body)
        d.addCallback(_missing_child)
        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
        def _check_0shares_dir_html(body):
            self.failUnless("<html>" in body, body)
            # we should see the regular page, but without the child table or
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_0shares_dir_html)
        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
        def _check_1shares_dir_html(body):
            # at some point, we'll split UnrecoverableFileError into 0-shares
            # and some-shares like we did for immutable files (since there
            # are different sorts of advice to offer in each case). For now,
            # they present the same way.
            self.failUnless("<html>" in body, body)
            body = " ".join(body.strip().split())
            self.failUnlessIn('href="?t=info">More info on this directory',
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessIn(exp, body)
            self.failUnlessIn("No upload forms: directory is unreadable", body)
        d.addCallback(_check_1shares_dir_html)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-0share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-0share-json"]))
        def _check_unrecoverable_file(body):
            self.failIf("<html>" in body, body)
            body = " ".join(body.strip().split())
            exp = ("UnrecoverableFileError: the directory (or mutable file) "
                   "could not be retrieved, because there were insufficient "
                   "good shares. This might indicate that no servers were "
                   "connected, insufficient servers were connected, the URI "
                   "was corrupt, or that shares have been lost due to server "
                   "departure, hard drive failure, or disk corruption. You "
                   "should perform a filecheck on this object to learn more.")
            self.failUnlessReallyEqual(exp, body)
        d.addCallback(_check_unrecoverable_file)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET dir-1share-json",
                                           410, "Gone", "UnrecoverableFileError",
                                           self.fileurls["dir-1share-json"]))
        d.addCallback(_check_unrecoverable_file)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET imaginary",
                                           404, "Not Found", None,
                                           self.GET, self.fileurls["imaginary"]))
        # attach a webapi child that throws a random error, to test how it
        w = c0.getServiceNamed("webish")
        w.root.putChild("ERRORBOOM", ErrorBoom())
        # "Accept: */*" : should get a text/html stack trace
        # "Accept: text/plain" : should get a text/plain stack trace
        # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
        # no Accept header: should get a text/html stack trace
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_html",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["*/*"]}))
        def _internal_error_html1(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html1)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": ["text/plain"]}))
        def _internal_error_text2(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text2)
        CLI_accepts = "text/plain, application/octet-stream"
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM",
                                           headers={"accept": [CLI_accepts]}))
        def _internal_error_text3(body):
            self.failIf("<html>" in body, body)
            self.failUnless(body.startswith("Traceback "), body)
        d.addCallback(_internal_error_text3)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("GET errorboom_text",
                                           500, "Internal Server Error", None,
                                           self.GET, "ERRORBOOM"))
        def _internal_error_html4(body):
            self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
        d.addCallback(_internal_error_html4)
        def _flush_errors(res):
            # Trial: please ignore the CompletelyUnhandledError in the logs
            self.flushLoggedErrors(CompletelyUnhandledError)
        d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Raised by ErrorBoom so tests can observe the webapi's generic
    500 Internal Server Error reporting; flushed in test_exceptions."""
class ErrorBoom(rend.Page):
    """A nevow page whose rendering always fails.

    beforeRender raises CompletelyUnhandledError, letting the tests watch
    how the web server turns an unexpected exception into an HTTP 500
    response under various Accept headers.
    """
    def beforeRender(self, context):
        # Explode before any rendering work happens.
        raise CompletelyUnhandledError("whoops")