2 import os.path, re, urllib, time
4 from StringIO import StringIO
5 from twisted.application import service
6 from twisted.trial import unittest
7 from twisted.internet import defer, reactor
8 from twisted.internet.task import Clock
9 from twisted.web import client, error, http
10 from twisted.python import failure, log
11 from nevow import rend
12 from allmydata import interfaces, uri, webish, dirnode
13 from allmydata.storage.shares import get_share_file
14 from allmydata.storage_client import StorageFarmBroker
15 from allmydata.immutable import upload
16 from allmydata.immutable.downloader.status import DownloadStatus
17 from allmydata.dirnode import DirectoryNode
18 from allmydata.nodemaker import NodeMaker
19 from allmydata.unknown import UnknownNode
20 from allmydata.web import status, common
21 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
22 from allmydata.util import fileutil, base32
23 from allmydata.util.consumer import download_to_data
24 from allmydata.util.netstring import split_netstring
25 from allmydata.util.encodingutil import to_str
26 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
27 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
28 from allmydata.interfaces import IMutableFileNode
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

# generous trial timeout: these tests do real (in-memory fake grid) work
timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.

# UTF-8-encoded caps in formats this client version does not understand,
# used to exercise UnknownNode handling (read-write, read-only, immutable)
unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
class FakeStatsProvider:
    # stub stats provider: supplies an empty stats/counters mapping so the
    # welcome/status pages have something to render
    # NOTE(review): the accessor method wrapping this assignment appears to
    # be missing from this chunk (line numbering shows gaps).
    stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker that hands out in-memory fake filenodes instead of real ones."""
    def _create_lit(self, cap):
        # literal caps are backed by the same fake as CHK caps
        return FakeCHKFileNode(cap)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)
    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted for interface compatibility but ignored
        return FakeMutableFileNode(None, None, None, None).create(contents)
class FakeUploader(service.Service):
    # fake uploader: reads the whole uploadable into memory, stores it in a
    # fake CHK filenode, and reports its URI via UploadResults
    # NOTE(review): several lines of this class are missing from this chunk
    # (service name attribute, the _got_data callback header, returns).
    def upload(self, uploadable, history=None):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        # NOTE(review): the following lines belong to a callback whose
        # 'def _got_data(...)' header is not visible here
        n = create_chk_filenode(data)
        results = upload.UploadResults()
        results.uri = n.get_uri()
        d.addCallback(_got_data)
    def get_helper_info(self):
# NOTE(review): the enclosing function header (presumably build_one_ds())
# and the binding of 'now' are missing from this chunk. The statements
# below populate a DownloadStatus with one of every event type so the
# status web pages have realistic data to render.
ds = DownloadStatus("storage_index", 1234)
ds.add_segment_request(0, now)
# segnum, when, start,len, decodetime
ds.add_segment_delivery(0, now+1, 0, 100, 0.5)
ds.add_segment_request(1, now+2)
ds.add_segment_error(1, now+3)
# two outstanding requests
ds.add_segment_request(2, now+4)
ds.add_segment_request(3, now+5)

# simulate a segment which gets delivered faster than a system clock tick (ticket #1166)
ds.add_segment_request(4, now)
ds.add_segment_delivery(4, now, 0, 140, 0.5)

e = ds.add_dyhb_sent("serverid_a", now)
e.finished([1,2], now+1)
e = ds.add_dyhb_sent("serverid_b", now+2) # left unfinished

e = ds.add_read_event(0, 120, now)
e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime

e = ds.add_read_event(120, 30, now+2) # left unfinished

e = ds.add_request_sent("serverid_a", 1, 100, 20, now)
e.finished(20, now+1)
e = ds.add_request_sent("serverid_a", 1, 120, 30, now+1) # left unfinished

# make sure that add_read_event() can come first too
ds1 = DownloadStatus("storage_index", 1234)
e = ds1.add_read_event(0, 120, now)
e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
# NOTE(review): the FakeHistory class header is missing from this chunk.
# These attributes/methods provide canned status objects backing /status.
_all_upload_status = [upload.UploadStatus()]
_all_download_status = [build_one_ds()]
_all_mapupdate_statuses = [servermap.UpdateStatus()]
_all_publish_statuses = [publish.PublishStatus()]
_all_retrieve_statuses = [retrieve.RetrieveStatus()]

def list_all_upload_statuses(self):
    return self._all_upload_status
def list_all_download_statuses(self):
    return self._all_download_status
def list_all_mapupdate_statuses(self):
    return self._all_mapupdate_statuses
def list_all_publish_statuses(self):
    return self._all_publish_statuses
def list_all_retrieve_statuses(self):
    return self._all_retrieve_statuses
def list_all_helper_statuses(self):
class FakeClient(Client):
    # don't upcall to Client.__init__, since we only want to initialize a
    # NOTE(review): the 'def __init__(self):' header and several attribute
    # assignments are missing from this chunk; the lines below are the
    # visible remainder of the constructor body.
    service.MultiService.__init__(self)
    self.nodeid = "fake_nodeid"
    self.nickname = "fake_nickname"
    self.introducer_furl = "None"
    self.stats_provider = FakeStatsProvider()
    self._secret_holder = SecretHolder("lease secret", "convergence secret")
    self.convergence = "some random string"
    self.storage_broker = StorageFarmBroker(None, permute_peers=True)
    self.introducer_client = None
    self.history = FakeHistory()
    self.uploader = FakeUploader()
    self.uploader.setServiceParent(self)
    self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,

    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)
161 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
class WebMixin(object):
    # NOTE(review): the 'def setUp(self):' header and a number of body lines
    # (callback headers, helper continuations) are missing from this chunk.
    # setUp builds a FakeClient, starts a webish server on an ephemeral
    # port, and populates a small directory tree for the HTTP tests.
    self.s = FakeClient()
    self.s.startService()
    self.staticdir = self.mktemp()
    self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
    self.ws.setServiceParent(self.s)
    self.webish_port = port = self.ws.listener._port.getHost().port
    self.webish_url = "http://localhost:%d" % port

    l = [ self.s.create_dirnode() for x in range(6) ]
    d = defer.DeferredList(l)
    # NOTE(review): the following lines belong to a DeferredList callback
    # whose header is not visible here
    self.public_root = res[0][1]
    assert interfaces.IDirectoryNode.providedBy(self.public_root), res
    self.public_url = "/uri/" + self.public_root.get_uri()
    self.private_root = res[1][1]
    self._foo_uri = foo.get_uri()
    self._foo_readonly_uri = foo.get_readonly_uri()
    self._foo_verifycap = foo.get_verify_cap().to_string()
    # NOTE: we ignore the deferred on all set_uri() calls, because we
    # know the fake nodes do these synchronously
    self.public_root.set_uri(u"foo", foo.get_uri(),
                             foo.get_readonly_uri())
    self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
    foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
    self._bar_txt_verifycap = n.get_verify_cap().to_string()
    foo.set_uri(u"empty", res[3][1].get_uri(),
                res[3][1].get_readonly_uri())
    sub_uri = res[4][1].get_uri()
    self._sub_uri = sub_uri
    foo.set_uri(u"sub", sub_uri, sub_uri)
    sub = self.s.create_node_from_uri(sub_uri)
    _ign, n, blocking_uri = self.makefile(1)
    foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
    unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
    # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
    # still think of it as an umlaut
    foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
    _ign, n, baz_file = self.makefile(2)
    self._baz_file_uri = baz_file
    sub.set_uri(u"baz.txt", baz_file, baz_file)
    _ign, n, self._bad_file_uri = self.makefile(3)
    # this uri should not be downloadable
    del FakeCHKFileNode.all_contents[self._bad_file_uri]
    self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
                             rodir.get_readonly_uri())
    rodir.set_uri(u"nor", baz_file, baz_file)
    # public/foo/blockingfile
    # public/foo/sub/baz.txt
    # public/reedownlee/nor
    self.NEWFILE_CONTENTS = "newfile contents\n"
    return foo.get_metadata_for(u"bar.txt")
    def _got_metadata(metadata):
        self._bar_txt_metadata = metadata
    d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a fake immutable CHK file whose contents mention *number*.

    Returns (contents, filenode, uri), ready to be linked into a dirnode.
    """
    body = "contents of file %s\n" % number
    node = create_chk_filenode(body)
    return (body, node, node.get_uri())
249 return self.s.stopService()
def failUnlessIsBarDotTxt(self, res):
    """Assert that *res* is exactly the known contents of bar.txt."""
    expected = self.BAR_CONTENTS
    self.failUnlessReallyEqual(res, expected, res)
def failUnlessIsBarJSON(self, res):
    """Assert *res* is the t=json rendering of the immutable bar.txt node."""
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    self.failUnlessEqual(data[0], "filenode")
    info = data[1]
    self.failUnless(isinstance(info, dict))
    self.failIf(info["mutable"])
    self.failIf("rw_uri" in info) # immutable
    self.failUnlessReallyEqual(to_str(info["ro_uri"]), self._bar_txt_uri)
    self.failUnlessReallyEqual(to_str(info["verify_uri"]), self._bar_txt_verifycap)
    self.failUnlessReallyEqual(info["size"], len(self.BAR_CONTENTS))
def failUnlessIsFooJSON(self, res):
    # Assert that *res* is the t=json rendering of the mutable 'foo' dirnode
    # and that its children carry the expected caps and tahoe metadata.
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    self.failUnlessEqual(data[0], "dirnode", res)
    self.failUnless(isinstance(data[1], dict))
    self.failUnless(data[1]["mutable"])
    self.failUnless("rw_uri" in data[1]) # mutable
    self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
    self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
    self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)

    kidnames = sorted([unicode(n) for n in data[1]["children"]])
    self.failUnlessEqual(kidnames,
                         [u"bar.txt", u"blockingfile", u"empty",
                          u"n\u00fc.txt", u"sub"])
    # NOTE(review): the 'for name, value' clause of this dict-building
    # comprehension is missing from this chunk
    kids = dict( [(unicode(name),value)
                  in data[1]["children"].iteritems()] )
    self.failUnlessEqual(kids[u"sub"][0], "dirnode")
    self.failUnlessIn("metadata", kids[u"sub"][1])
    self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
    tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
    self.failUnlessIn("linkcrtime", tahoe_md)
    self.failUnlessIn("linkmotime", tahoe_md)
    self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
    self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
    self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
    self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
                               self._bar_txt_verifycap)
    self.failUnlessIn("metadata", kids[u"bar.txt"][1])
    self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
    self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
                               self._bar_txt_metadata["tahoe"]["linkcrtime"])
    # NOTE(review): the closing argument of this final call is missing —
    # the method is truncated at the end of this chunk
    self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
# Issue an HTTP GET against the test webish server; returns a Deferred.
# NOTE(review): the '**kwargs):' tail of this signature and the callback
# header around the return-tuple line are missing from this chunk.
def GET(self, urlpath, followRedirect=False, return_response=False,
    # if return_response=True, this fires with (data, statuscode,
    # respheaders) instead of just data.
    assert not isinstance(urlpath, unicode)
    url = self.webish_url + urlpath
    factory = HTTPClientGETFactory(url, method="GET",
                                   followRedirect=followRedirect, **kwargs)
    reactor.connectTCP("localhost", self.webish_port, factory)
    return (data, factory.status, factory.response_headers)
    d.addCallback(_got_data)
    return factory.deferred
def HEAD(self, urlpath, return_response=False, **kwargs):
    # this requires some surgery, because twisted.web.client doesn't want
    # to give us back the response headers.
    # NOTE(review): the callback header around the return-tuple line is
    # missing from this chunk.
    factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
    reactor.connectTCP("localhost", self.webish_port, factory)
    return (data, factory.status, factory.response_headers)
    d.addCallback(_got_data)
    return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """HTTP PUT *data* to the webapi at *urlpath*; returns a Deferred."""
    return client.getPage(self.webish_url + urlpath,
                          method="PUT", postdata=data, **kwargs)
def DELETE(self, urlpath):
    """HTTP DELETE the given webapi path; returns a Deferred."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
def POST(self, urlpath, followRedirect=False, **fields):
    # Build and submit a multipart/form-data POST from *fields*; tuple
    # values are treated as (filename, contents) uploads.
    # NOTE(review): several form-assembly lines (boundary separators,
    # headers initialization, the else: branch marker) are missing from
    # this chunk.
    sepbase = "boogabooga"
    form.append('Content-Disposition: form-data; name="_charset"')
    for name, value in fields.iteritems():
        if isinstance(value, tuple):
            filename, value = value
            form.append('Content-Disposition: form-data; name="%s"; '
                        'filename="%s"' % (name, filename.encode("utf-8")))
            form.append('Content-Disposition: form-data; name="%s"' % name)
        if isinstance(value, unicode):
            value = value.encode("utf-8")
        assert isinstance(value, str)
    body = "\r\n".join(form) + "\r\n"
    headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
    return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """POST a pre-encoded *body* (with *headers*) to the webapi.

    Returns the Deferred from twisted.web.client.getPage.

    Fix: the original used a mutable default argument (``headers={}``),
    which is shared across all calls and can leak header mutations from
    one test into another; use the None-sentinel idiom instead.
    """
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
def shouldFail(self, res, expected_failure, which,
               substring=None, response_substring=None):
    # addBoth-style helper: *res* should be a Failure wrapping
    # *expected_failure*; optionally check substrings of the exception
    # text and of the HTTP response body. *which* labels the failure.
    # NOTE(review): the 'if substring:' and 'else:' framing lines are
    # missing from this chunk.
    if isinstance(res, failure.Failure):
        res.trap(expected_failure)
        self.failUnless(substring in str(res),
                        "substring '%s' not in '%s'"
                        % (substring, str(res)))
        if response_substring:
            self.failUnless(response_substring in res.value.response,
                            "response substring '%s' not in '%s'"
                            % (response_substring, res.value.response))
    self.fail("%s was supposed to raise %s, not get '%s'" %
              (which, expected_failure, res))
# Like shouldFail, but invokes *callable* itself (via maybeDeferred) and
# attaches the check as a callback.
# NOTE(review): the 'response_substring=None,' signature line, the
# callback/else framing lines, and the trailing 'return d' are missing
# from this chunk.
def shouldFail2(self, expected_failure, which, substring,
                callable, *args, **kwargs):
    assert substring is None or isinstance(substring, str)
    assert response_substring is None or isinstance(response_substring, str)
    d = defer.maybeDeferred(callable, *args, **kwargs)
    if isinstance(res, failure.Failure):
        res.trap(expected_failure)
        self.failUnless(substring in str(res),
                        "%s: substring '%s' not in '%s'"
                        % (which, substring, str(res)))
        if response_substring:
            self.failUnless(response_substring in res.value.response,
                            "%s: response substring '%s' not in '%s'"
                            % (response_substring, res.value.response))
    self.fail("%s was supposed to raise %s, not get '%s'" %
              (which, expected_failure, res))
def should404(self, res, which):
    # addBoth-style helper: *res* should be a web error with status 404.
    # NOTE(review): the 'else:' line and the closing argument of the
    # final self.fail() call are missing from this chunk.
    if isinstance(res, failure.Failure):
        res.trap(error.Error)
        self.failUnlessReallyEqual(res.value.status, "404")
    self.fail("%s was supposed to Error(404), not get '%s'" %
def should302(self, res, which):
    # addBoth-style helper: *res* should be a web error with status 302.
    # NOTE(review): the 'else:' line and the closing argument of the
    # final self.fail() call are missing from this chunk.
    if isinstance(res, failure.Failure):
        res.trap(error.Error)
        self.failUnlessReallyEqual(res.value.status, "302")
    self.fail("%s was supposed to Error(302), not get '%s'" %
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    # NOTE(review): the bodies of the test methods in this region are
    # heavily truncated in this chunk (callback headers, GET calls, and
    # 'return d' lines are missing); code lines are preserved as-is.
    def test_create(self):
    def test_welcome(self):
        # the welcome page should render, and probing a node basedir with
        # a private/ subdirectory should also work
        self.failUnless('Welcome To Tahoe-LAFS' in res, res)
        self.s.basedir = 'web/test_welcome'
        fileutil.make_dirs("web/test_welcome")
        fileutil.make_dirs("web/test_welcome/private")
        d.addCallback(_check)
    def test_provisioning(self):
        # exercise the /provisioning calculator with several parameter sets
        d = self.GET("/provisioning/")
        self.failUnless('Provisioning Tool' in res)
        fields = {'filled': True,
                  "num_users": int(50e3),
                  "files_per_user": 1000,
                  "space_per_user": int(1e9),
                  "sharing_ratio": 1.0,
                  "encoding_parameters": "3-of-10-5",
                  "ownership_mode": "A",
                  "download_rate": 100,
        return self.POST("/provisioning/", **fields)
        d.addCallback(_check)
        self.failUnless('Provisioning Tool' in res)
        self.failUnless("Share space consumed: 167.01TB" in res)
        fields = {'filled': True,
                  "num_users": int(50e6),
                  "files_per_user": 1000,
                  "space_per_user": int(5e9),
                  "sharing_ratio": 1.0,
                  "encoding_parameters": "25-of-100-50",
                  "num_servers": 30000,
                  "ownership_mode": "E",
                  "drive_failure_model": "U",
                  "download_rate": 1000,
        return self.POST("/provisioning/", **fields)
        d.addCallback(_check2)
        self.failUnless("Share space consumed: huge!" in res)
        fields = {'filled': True}
        return self.POST("/provisioning/", **fields)
        d.addCallback(_check3)
        self.failUnless("Share space consumed:" in res)
        d.addCallback(_check4)
def test_reliability_tool(self):
    # exercise the /reliability calculator; skipped when NumPy is absent
    # NOTE(review): the try/except around the import, callback headers,
    # and 'return d' are missing from this chunk.
    from allmydata import reliability
    _hush_pyflakes = reliability
    raise unittest.SkipTest("reliability tool requires NumPy")
    d = self.GET("/reliability/")
    self.failUnless('Reliability Tool' in res)
    fields = {'drive_lifetime': "8Y",
              "check_period": "1M",
              "report_period": "3M",
    return self.POST("/reliability/", **fields)
    d.addCallback(_check)
    self.failUnless('Reliability Tool' in res)
    r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
    self.failUnless(re.search(r, res), res)
    d.addCallback(_check2)
def test_status(self):
    # exercise /status and its per-operation sub-pages, using the canned
    # status objects supplied by FakeHistory
    # NOTE(review): several callback headers and the final 'return d' are
    # missing from this chunk.
    h = self.s.get_history()
    dl_num = h.list_all_download_statuses()[0].get_counter()
    ul_num = h.list_all_upload_statuses()[0].get_counter()
    mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
    pub_num = h.list_all_publish_statuses()[0].get_counter()
    ret_num = h.list_all_retrieve_statuses()[0].get_counter()
    d = self.GET("/status", followRedirect=True)
    self.failUnless('Upload and Download Status' in res, res)
    self.failUnless('"down-%d"' % dl_num in res, res)
    self.failUnless('"up-%d"' % ul_num in res, res)
    self.failUnless('"mapupdate-%d"' % mu_num in res, res)
    self.failUnless('"publish-%d"' % pub_num in res, res)
    self.failUnless('"retrieve-%d"' % ret_num in res, res)
    d.addCallback(_check)
    d.addCallback(lambda res: self.GET("/status/?t=json"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, dict))
        #active = data["active"]
        # TODO: test more. We need a way to fake an active operation
    d.addCallback(_check_json)

    d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
    self.failUnless("File Download Status" in res, res)
    d.addCallback(_check_dl)
    d.addCallback(lambda res: self.GET("/status/down-%d?t=json" % dl_num))
    def _check_dl_json(res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, dict))
    d.addCallback(_check_dl_json)
    d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
    self.failUnless("File Upload Status" in res, res)
    d.addCallback(_check_ul)
    d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
    def _check_mapupdate(res):
        self.failUnless("Mutable File Servermap Update Status" in res, res)
    d.addCallback(_check_mapupdate)
    d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
    def _check_publish(res):
        self.failUnless("Mutable File Publish Status" in res, res)
    d.addCallback(_check_publish)
    d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
    def _check_retrieve(res):
        self.failUnless("Mutable File Retrieve Status" in res, res)
    d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """Check human-readable time/rate formatting on both results renderers."""
    time_cases = [(None, ""),
                  (2.5, "2.50s"),
                  (0.25, "250ms"),
                  (0.0021, "2.1ms"),
                  (0.000123, "123us")]
    rate_cases = [(None, ""),
                  (2500000, "2.50MBps"),
                  (30100, "30.1kBps"),
                  (123, "123Bps")]
    # download renderer first, then upload, matching the original order
    for rend in (status.DownloadResultsRendererMixin(),
                 status.UploadResultsRendererMixin()):
        for value, expected in time_cases:
            self.failUnlessReallyEqual(rend.render_time(None, value), expected)
        for value, expected in rate_cases:
            self.failUnlessReallyEqual(rend.render_rate(None, value), expected)
# NOTE(review): every method in this region is missing its trailing
# 'd.addCallback(_got)' / 'return d' lines (and some shouldFail2 argument
# continuations) in this chunk; code lines are preserved as-is.
def test_GET_FILEURL(self):
    d = self.GET(self.public_url + "/foo/bar.txt")
    d.addCallback(self.failUnlessIsBarDotTxt)

def test_GET_FILEURL_range(self):
    # a satisfiable Range request gets 206 + Content-Range
    headers = {"range": "bytes=1-10"}
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 1-10/%d" % len(self.BAR_CONTENTS))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])

def test_GET_FILEURL_partial_range(self):
    # open-ended range "bytes=5-"
    headers = {"range": "bytes=5-"}
    length = len(self.BAR_CONTENTS)
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 5-%d/%d" % (length-1, length))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])

def test_GET_FILEURL_partial_end_range(self):
    # suffix range "bytes=-5" returns the last five bytes
    headers = {"range": "bytes=-5"}
    length = len(self.BAR_CONTENTS)
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes %d-%d/%d" % (length-5, length-1, length))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])

def test_GET_FILEURL_partial_range_overrun(self):
    # a range starting past EOF is rejected with 416
    headers = {"range": "bytes=100-200"}
    d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
                         "416 Requested Range not satisfiable",
                         "First beyond end of file",
                         self.GET, self.public_url + "/foo/bar.txt",

def test_HEAD_FILEURL_range(self):
    headers = {"range": "bytes=1-10"}
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(res, "")
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 1-10/%d" % len(self.BAR_CONTENTS))

def test_HEAD_FILEURL_partial_range(self):
    headers = {"range": "bytes=5-"}
    length = len(self.BAR_CONTENTS)
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes 5-%d/%d" % (length-1, length))

def test_HEAD_FILEURL_partial_end_range(self):
    headers = {"range": "bytes=-5"}
    length = len(self.BAR_CONTENTS)
    d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                  return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 206)
        self.failUnless(headers.has_key("content-range"))
        self.failUnlessReallyEqual(headers["content-range"][0],
                                   "bytes %d-%d/%d" % (length-5, length-1, length))

def test_HEAD_FILEURL_partial_range_overrun(self):
    headers = {"range": "bytes=100-200"}
    d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
                         "416 Requested Range not satisfiable",
                         self.HEAD, self.public_url + "/foo/bar.txt",

def test_GET_FILEURL_range_bad(self):
    # an unparseable Range header is ignored: plain 200 with full body
    headers = {"range": "BOGUS=fizbop-quarnak"}
    d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                 return_response=True)
    def _got((res, status, headers)):
        self.failUnlessReallyEqual(int(status), 200)
        self.failUnless(not headers.has_key("content-range"))
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
# NOTE(review): trailing callbacks / 'return d' lines are missing from
# these methods in this chunk.
def test_HEAD_FILEURL(self):
    d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
    def _got((res, status, headers)):
        # HEAD: empty body, but content-length/type reflect the file
        self.failUnlessReallyEqual(res, "")
        self.failUnlessReallyEqual(headers["content-length"][0],
                                   str(len(self.BAR_CONTENTS)))
        self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])

def test_GET_FILEURL_named(self):
    # /file/CAP/@@name=/NAME and /named/CAP/... serve the same bytes
    base = "/file/%s" % urllib.quote(self._bar_txt_uri)
    base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
    d = self.GET(base + "/@@name=/blah.txt")
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base + "/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    save_url = base + "?save=true&filename=blah.txt"
    d.addCallback(lambda res: self.GET(save_url))
    d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
    u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
    u_fn_e = urllib.quote(u_filename.encode("utf-8"))
    u_url = base + "?save=true&filename=" + u_fn_e
    d.addCallback(lambda res: self.GET(u_url))
    d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
# NOTE(review): the methods in this region are missing argument
# continuations of shouldFail2 calls and trailing 'return d' lines in
# this chunk; code lines are preserved as-is.
def test_PUT_FILEURL_named_bad(self):
    base = "/file/%s" % urllib.quote(self._bar_txt_uri)
    d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
                         "/file can only be used with GET or HEAD",
                         self.PUT, base + "/@@name=/blah.txt", "")

def test_GET_DIRURL_named_bad(self):
    base = "/file/%s" % urllib.quote(self._foo_uri)
    d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
                         self.GET, base + "/@@name=/blah.txt")

def test_GET_slash_file_bad(self):
    d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
                         "/file must be followed by a file-cap and a name",

def test_GET_unhandled_URI_named(self):
    contents, n, newuri = self.makefile(12)
    verifier_cap = n.get_verify_cap().to_string()
    base = "/file/%s" % urllib.quote(verifier_cap)
    # client.create_node_from_uri() can't handle verify-caps
    d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
                         "400 Bad Request", "is not a file-cap",

def test_GET_unhandled_URI(self):
    contents, n, newuri = self.makefile(12)
    verifier_cap = n.get_verify_cap().to_string()
    base = "/uri/%s" % urllib.quote(verifier_cap)
    # client.create_node_from_uri() can't handle verify-caps
    d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
                         "GET unknown URI type: can only do t=info",

def test_GET_FILE_URI(self):
    base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
    d.addCallback(self.failUnlessIsBarDotTxt)

def test_GET_FILE_URI_badchild(self):
    base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
    errmsg = "Files have no children, certainly not named 'boguschild'"
    d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                         "400 Bad Request", errmsg,

def test_PUT_FILE_URI_badchild(self):
    base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
    errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
    d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
                         "400 Bad Request", errmsg,

# TODO: version of this with a Unicode filename
def test_GET_FILEURL_save(self):
    d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
                 return_response=True)
    def _got((res, statuscode, headers)):
        content_disposition = headers["content-disposition"][0]
        self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
        self.failUnlessIsBarDotTxt(res)

def test_GET_FILEURL_missing(self):
    d = self.GET(self.public_url + "/foo/missing")
    d.addBoth(self.should404, "test_GET_FILEURL_missing")
# NOTE(review): the methods in this region are missing argument
# continuations and trailing 'return d' lines in this chunk; code lines
# are preserved as-is.
def test_PUT_overwrite_only_files(self):
    # create a directory, put a file in that directory.
    contents, n, filecap = self.makefile(8)
    d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/dir/file1.txt",
                           self.NEWFILE_CONTENTS))
    # try to overwrite the file with replace=only-files
    d.addCallback(lambda res:
                  self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
                                   "There was already a child by that name, and you asked me "
                                   self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",

def test_PUT_NEWFILEURL(self):
    d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 201)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_not_mutable(self):
    d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
                 self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 201)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_range_bad(self):
    headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
    target = self.public_url + "/foo/new.txt"
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
                         "501 Not Implemented",
                         "Content-Range in PUT not yet supported",
                         # (and certainly not for immutable files)
                         self.PUT, target, self.NEWFILE_CONTENTS[1:11],
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"new.txt"))

def test_PUT_NEWFILEURL_mutable(self):
    d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
                 self.NEWFILE_CONTENTS)
    # TODO: we lose the response code, so we can't check this
    #self.failUnlessReallyEqual(responsecode, 201)
    u = uri.from_string_mutable_filenode(res)
    self.failUnless(u.is_mutable())
    self.failIf(u.is_readonly())
    d.addCallback(_check_uri)
    d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessMutableChildContentsAre(self._foo_node,
                                                         self.NEWFILE_CONTENTS))

def test_PUT_NEWFILEURL_mutable_toobig(self):
    # SDMF is single-segment: one byte over the limit must be rejected
    d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.public_url + "/foo/new.txt?mutable=true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1))
912 def test_PUT_NEWFILEURL_replace(self):
913 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
914 # TODO: we lose the response code, so we can't check this
915 #self.failUnlessReallyEqual(responsecode, 200)
916 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
917 d.addCallback(lambda res:
918 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
919 self.NEWFILE_CONTENTS))
922 def test_PUT_NEWFILEURL_bad_t(self):
923 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
924 "PUT to a file: bad t=bogus",
925 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
929 def test_PUT_NEWFILEURL_no_replace(self):
# with ?replace=false, PUT to an already-existing child must fail
# instead of overwriting it.
930 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
931 self.NEWFILE_CONTENTS)
932 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
934 "There was already a child by that name, and you asked me "
938 def test_PUT_NEWFILEURL_mkdirs(self):
# PUT to foo/newdir/new.txt auto-creates the intermediate 'newdir'
# directory and places new.txt inside it (not directly under foo).
939 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
941 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
942 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
943 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
944 d.addCallback(lambda res:
945 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
946 self.NEWFILE_CONTENTS))
949 def test_PUT_NEWFILEURL_blocked(self):
# a path component that is an existing *file* blocks the implicit mkdir.
950 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
951 self.NEWFILE_CONTENTS)
952 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
954 "Unable to create directory 'blockingfile': a file was in the way")
957 def test_PUT_NEWFILEURL_emptyname(self):
958 # an empty pathname component (i.e. a double-slash) is disallowed
# PUT to /foo//new.txt (note the double slash) must be rejected.
959 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
961 "The webapi does not allow empty pathname components",
962 self.PUT, self.public_url + "/foo//new.txt", "")
965 def test_DELETE_FILEURL(self):
# DELETE on a file URL unlinks the child from its parent directory.
966 d = self.DELETE(self.public_url + "/foo/bar.txt")
967 d.addCallback(lambda res:
968 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
971 def test_DELETE_FILEURL_missing(self):
# DELETE of a nonexistent child returns 404.
972 d = self.DELETE(self.public_url + "/foo/missing")
973 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
976 def test_DELETE_FILEURL_missing2(self):
# DELETE beneath a nonexistent intermediate directory also returns 404.
977 d = self.DELETE(self.public_url + "/missing/missing")
978 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
def failUnlessHasBarDotTxtMetadata(self, res):
    """Assert that the JSON body `res` carries bar.txt's tahoe link-metadata.

    The parsed body must be the usual (type, data) JSON list, its data dict
    must have metadata["tahoe"] with both linkcrtime and linkmotime, and
    linkcrtime must equal the value remembered in self._bar_txt_metadata.
    """
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessIn("metadata", parsed[1])
    md = parsed[1]["metadata"]
    self.failUnlessIn("tahoe", md)
    tahoe_md = md["tahoe"]
    self.failUnlessIn("linkcrtime", tahoe_md)
    self.failUnlessIn("linkmotime", tahoe_md)
    self.failUnlessReallyEqual(tahoe_md["linkcrtime"],
                               self._bar_txt_metadata["tahoe"]["linkcrtime"])
991 def test_GET_FILEURL_json(self):
992 # twisted.web.http.parse_qs ignores any query args without an '=', so
993 # I can't do "GET /path?json", I have to do "GET /path/t=json"
994 # instead. This may make it tricky to emulate the S3 interface
# t=json must return the bar.txt JSON representation, including the
# tahoe link-metadata checked by failUnlessHasBarDotTxtMetadata.
996 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
998 self.failUnlessIsBarJSON(data)
999 self.failUnlessHasBarDotTxtMetadata(data)
1001 d.addCallback(_check1)
1004 def test_GET_FILEURL_json_missing(self):
# a GET of a nonexistent child must 404 (the bare '?json' arg carries
# no '=' and is ignored by the query parser, per the note above GET
# t=json handling in this file).
1005 d = self.GET(self.public_url + "/foo/missing?json")
1006 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
1009 def test_GET_FILEURL_uri(self):
# t=uri returns the file's URI; for this immutable file t=readonly-uri
# returns the same string.
1010 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
1012 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1013 d.addCallback(_check)
1014 d.addCallback(lambda res:
1015 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
1017 # for now, for files, uris and readonly-uris are the same
1018 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1019 d.addCallback(_check2)
1022 def test_GET_FILEURL_badtype(self):
# an unknown t= argument on a file GET yields 400 Bad Request.
1023 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
1026 self.public_url + "/foo/bar.txt?t=bogus")
1029 def test_CSS_FILE(self):
# /tahoe_css must be served and contain the toolbar style rules.
# NOTE(review): the pattern is not a raw string; '\s' survives only
# because it is not a recognized string escape -- r'...' would be safer.
1030 d = self.GET("/tahoe_css", followRedirect=True)
1032 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
1033 self.failUnless(CSS_STYLE.search(res), res)
1034 d.addCallback(_check)
1037 def test_GET_FILEURL_uri_missing(self):
# t=uri on a nonexistent child returns 404.
1038 d = self.GET(self.public_url + "/foo/missing?t=uri")
1039 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
1042 def test_GET_DIRECTORY_html_banner(self):
# the directory listing's toolbar must link back to the Welcome page.
1043 d = self.GET(self.public_url + "/foo", followRedirect=True)
1045 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>',res)
1046 d.addCallback(_check)
1049 def test_GET_DIRURL(self):
# Exercises the HTML directory listing: the Welcome-page link, the
# FILE row for bar.txt (relative /file/ URL ending in the child name),
# the relative delete-form target, the DIR row for 'sub', a read-only
# directory, a directory containing a read-only directory, an empty
# directory's mkdir form, and a LIT (literal) directory.
1050 # the addSlash means we get a redirect here
1051 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
1053 d = self.GET(self.public_url + "/foo", followRedirect=True)
1055 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
1057 # the FILE reference points to a URI, but it should end in bar.txt
1058 bar_url = ("%s/file/%s/@@named=/bar.txt" %
1059 (ROOT, urllib.quote(self._bar_txt_uri)))
1060 get_bar = "".join([r'<td>FILE</td>',
1062 r'<a href="%s">bar.txt</a>' % bar_url,
1064 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
1066 self.failUnless(re.search(get_bar, res), res)
1067 for line in res.split("\n"):
1068 # find the line that contains the delete button for bar.txt
1069 if ("form action" in line and
1070 'value="delete"' in line and
1071 'value="bar.txt"' in line):
1072 # the form target should use a relative URL
1073 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1074 self.failUnless(('action="%s"' % foo_url) in line, line)
1075 # and the when_done= should too
1076 #done_url = urllib.quote(???)
1077 #self.failUnless(('name="when_done" value="%s"' % done_url)
1081 self.fail("unable to find delete-bar.txt line", res)
1083 # the DIR reference just points to a URI
1084 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1085 get_sub = ((r'<td>DIR</td>')
1086 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
1087 self.failUnless(re.search(get_sub, res), res)
1088 d.addCallback(_check)
1090 # look at a readonly directory
1091 d.addCallback(lambda res:
1092 self.GET(self.public_url + "/reedownlee", followRedirect=True))
1094 self.failUnless("(read-only)" in res, res)
1095 self.failIf("Upload a file" in res, res)
1096 d.addCallback(_check2)
1098 # and at a directory that contains a readonly directory
1099 d.addCallback(lambda res:
1100 self.GET(self.public_url, followRedirect=True))
1102 self.failUnless(re.search('<td>DIR-RO</td>'
1103 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1104 d.addCallback(_check3)
1106 # and an empty directory
1107 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1109 self.failUnless("directory is empty" in res, res)
1110 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1111 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1112 d.addCallback(_check4)
1114 # and at a literal directory
1115 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1116 d.addCallback(lambda res:
1117 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1119 self.failUnless('(immutable)' in res, res)
1120 self.failUnless(re.search('<td>FILE</td>'
1121 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1122 d.addCallback(_check5)
1125 def test_GET_DIRURL_badtype(self):
# an unknown t= argument on a directory GET must produce an HTTP error.
1126 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1130 self.public_url + "/foo?t=bogus")
1133 def test_GET_DIRURL_json(self):
# t=json on a directory returns its JSON representation.
1134 d = self.GET(self.public_url + "/foo?t=json")
1135 d.addCallback(self.failUnlessIsFooJSON)
1139 def test_POST_DIRURL_manifest_no_ophandle(self):
# t=start-manifest is a slow operation and must demand an ophandle=.
1140 d = self.shouldFail2(error.Error,
1141 "test_POST_DIRURL_manifest_no_ophandle",
1143 "slow operation requires ophandle=",
1144 self.POST, self.public_url, t="start-manifest")
1147 def test_POST_DIRURL_manifest(self):
# start a manifest operation under ophandle=125, poll it to
# completion, then fetch the results in html, text, and json forms
# and sanity-check each representation.
1148 d = defer.succeed(None)
1149 def getman(ignored, output):
# (re)start the manifest operation, wait for it, and return the
# operation results in the requested output format.
1150 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1151 followRedirect=True)
1152 d.addCallback(self.wait_for_operation, "125")
1153 d.addCallback(self.get_operation_results, "125", output)
1155 d.addCallback(getman, None)
1156 def _got_html(manifest):
1157 self.failUnless("Manifest of SI=" in manifest)
1158 self.failUnless("<td>sub</td>" in manifest)
1159 self.failUnless(self._sub_uri in manifest)
1160 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1161 d.addCallback(_got_html)
1163 # both t=status and unadorned GET should be identical
1164 d.addCallback(lambda res: self.GET("/operations/125"))
1165 d.addCallback(_got_html)
1167 d.addCallback(getman, "html")
1168 d.addCallback(_got_html)
1169 d.addCallback(getman, "text")
1170 def _got_text(manifest):
1171 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1172 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1173 d.addCallback(_got_text)
1174 d.addCallback(getman, "JSON")
1176 data = res["manifest"]
1178 for (path_list, cap) in data:
1179 got[tuple(path_list)] = cap
1180 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1181 self.failUnless((u"sub",u"baz.txt") in got)
1182 self.failUnless("finished" in res)
1183 self.failUnless("origin" in res)
1184 self.failUnless("storage-index" in res)
1185 self.failUnless("verifycaps" in res)
1186 self.failUnless("stats" in res)
1187 d.addCallback(_got_json)
1190 def test_POST_DIRURL_deepsize_no_ophandle(self):
# t=start-deep-size is a slow operation and must demand an ophandle=.
1191 d = self.shouldFail2(error.Error,
1192 "test_POST_DIRURL_deepsize_no_ophandle",
1194 "slow operation requires ophandle=",
1195 self.POST, self.public_url, t="start-deep-size")
1198 def test_POST_DIRURL_deepsize(self):
# t=start-deep-size under ophandle=126: poll to completion, then check
# the reported total size via both the json and text result forms.
1199 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1200 followRedirect=True)
1201 d.addCallback(self.wait_for_operation, "126")
1202 d.addCallback(self.get_operation_results, "126", "json")
1203 def _got_json(data):
1204 self.failUnlessReallyEqual(data["finished"], True)
1206 self.failUnless(size > 1000)
1207 d.addCallback(_got_json)
1208 d.addCallback(self.get_operation_results, "126", "text")
1210 mo = re.search(r'^size: (\d+)$', res, re.M)
1211 self.failUnless(mo, res)
1212 size = int(mo.group(1))
1213 # with directories, the size varies.
1214 self.failUnless(size > 1000)
1215 d.addCallback(_got_text)
1218 def test_POST_DIRURL_deepstats_no_ophandle(self):
# t=start-deep-stats is a slow operation and must demand an ophandle=.
1219 d = self.shouldFail2(error.Error,
1220 "test_POST_DIRURL_deepstats_no_ophandle",
1222 "slow operation requires ophandle=",
1223 self.POST, self.public_url, t="start-deep-stats")
1226 def test_POST_DIRURL_deepstats(self):
# t=start-deep-stats under ophandle=127: the JSON results must match
# the known contents of the 'foo' tree (counts and sizes below).
1227 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1228 followRedirect=True)
1229 d.addCallback(self.wait_for_operation, "127")
1230 d.addCallback(self.get_operation_results, "127", "json")
1231 def _got_json(stats):
1232 expected = {"count-immutable-files": 3,
1233 "count-mutable-files": 0,
1234 "count-literal-files": 0,
1236 "count-directories": 3,
1237 "size-immutable-files": 57,
1238 "size-literal-files": 0,
1239 #"size-directories": 1912, # varies
1240 #"largest-directory": 1590,
1241 "largest-directory-children": 5,
1242 "largest-immutable-file": 19,
1244 for k,v in expected.iteritems():
1245 self.failUnlessReallyEqual(stats[k], v,
1246 "stats[%s] was %s, not %s" %
1248 self.failUnlessReallyEqual(stats["size-files-histogram"],
1250 d.addCallback(_got_json)
1253 def test_POST_DIRURL_stream_manifest(self):
# t=stream-manifest returns newline-separated JSON units, ending with
# a "stats" unit; spot-check the root dir entry and sub/baz.txt.
1254 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1256 self.failUnless(res.endswith("\n"))
1257 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1258 self.failUnlessReallyEqual(len(units), 7)
1259 self.failUnlessEqual(units[-1]["type"], "stats")
1261 self.failUnlessEqual(first["path"], [])
1262 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1263 self.failUnlessEqual(first["type"], "directory")
1264 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1265 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1266 self.failIfEqual(baz["storage-index"], None)
1267 self.failIfEqual(baz["verifycap"], None)
1268 self.failIfEqual(baz["repaircap"], None)
1270 d.addCallback(_check)
1273 def test_GET_DIRURL_uri(self):
# t=uri on a directory returns its URI.
1274 d = self.GET(self.public_url + "/foo?t=uri")
1276 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1277 d.addCallback(_check)
1280 def test_GET_DIRURL_readonly_uri(self):
# t=readonly-uri on a directory returns its read-only URI.
1281 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1283 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1284 d.addCallback(_check)
1287 def test_PUT_NEWDIRURL(self):
# PUT ?t=mkdir creates a new empty directory under foo.
1288 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1289 d.addCallback(lambda res:
1290 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1291 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1292 d.addCallback(self.failUnlessNodeKeysAre, [])
1295 def test_POST_NEWDIRURL(self):
# POST ?t=mkdir creates a new empty directory under foo.
1296 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1297 d.addCallback(lambda res:
1298 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1299 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1300 d.addCallback(self.failUnlessNodeKeysAre, [])
1303 def test_POST_NEWDIRURL_emptyname(self):
1304 # an empty pathname component (i.e. a double-slash) is disallowed
# POST ?t=mkdir at '//' (empty component) must be rejected.
1305 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1307 "The webapi does not allow empty pathname components, i.e. a double slash",
1308 self.POST, self.public_url + "//?t=mkdir")
1311 def test_POST_NEWDIRURL_initial_children(self):
# t=mkdir-with-children takes a JSON body of child entries (including
# unknown future caps) and must link every one of them into the new
# directory; then re-verify through foo's new 'newdir' child.
1312 (newkids, caps) = self._create_initial_children()
1313 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1314 simplejson.dumps(newkids))
1316 n = self.s.create_node_from_uri(uri.strip())
1317 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1318 d2.addCallback(lambda ign:
1319 self.failUnlessROChildURIIs(n, u"child-imm",
1321 d2.addCallback(lambda ign:
1322 self.failUnlessRWChildURIIs(n, u"child-mutable",
1324 d2.addCallback(lambda ign:
1325 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1327 d2.addCallback(lambda ign:
1328 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1329 caps['unknown_rocap']))
1330 d2.addCallback(lambda ign:
1331 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1332 caps['unknown_rwcap']))
1333 d2.addCallback(lambda ign:
1334 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1335 caps['unknown_immcap']))
1336 d2.addCallback(lambda ign:
1337 self.failUnlessRWChildURIIs(n, u"dirchild",
1339 d2.addCallback(lambda ign:
1340 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1342 d2.addCallback(lambda ign:
1343 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1344 caps['emptydircap']))
1346 d.addCallback(_check)
1347 d.addCallback(lambda res:
1348 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1349 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1350 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1351 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1352 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1355 def test_POST_NEWDIRURL_immutable(self):
# t=mkdir-immutable with all-immutable children: verify every child
# cap both through the returned directory URI and again through foo's
# new 'newdir' child.
1356 (newkids, caps) = self._create_immutable_children()
1357 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1358 simplejson.dumps(newkids))
1360 n = self.s.create_node_from_uri(uri.strip())
1361 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1362 d2.addCallback(lambda ign:
1363 self.failUnlessROChildURIIs(n, u"child-imm",
1365 d2.addCallback(lambda ign:
1366 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1367 caps['unknown_immcap']))
1368 d2.addCallback(lambda ign:
1369 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1371 d2.addCallback(lambda ign:
1372 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1374 d2.addCallback(lambda ign:
1375 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1376 caps['emptydircap']))
1378 d.addCallback(_check)
1379 d.addCallback(lambda res:
1380 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1381 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1382 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1383 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1384 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1385 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1386 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1387 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1388 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1389 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1390 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1391 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1392 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1393 d.addErrback(self.explain_web_error)
1396 def test_POST_NEWDIRURL_immutable_bad(self):
# mkdir-immutable must refuse children that are not immutable.
1397 (newkids, caps) = self._create_initial_children()
1398 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1400 "needed to be immutable but was not",
1402 self.public_url + "/foo/newdir?t=mkdir-immutable",
1403 simplejson.dumps(newkids))
1406 def test_PUT_NEWDIRURL_exists(self):
# t=mkdir on an already-existing directory must not clobber it: 'sub'
# keeps its baz.txt child.
1407 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1408 d.addCallback(lambda res:
1409 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1410 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1411 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1414 def test_PUT_NEWDIRURL_blocked(self):
# mkdir beneath an existing *file* (bar.txt) must fail with 409 and
# leave the existing 'sub' directory untouched.
1415 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1416 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1418 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1419 d.addCallback(lambda res:
1420 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1421 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1422 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1425 def test_PUT_NEWDIRURL_mkdir_p(self):
# t=mkdir-p&path=/sub1/sub2 creates the whole chain under 'mkp' and
# returns the URI of the deepest directory; a repeat of the same
# request must hand back the same URI (idempotent).
1426 d = defer.succeed(None)
1427 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1428 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1429 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1430 def mkdir_p(mkpnode):
1431 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1433 def made_subsub(ssuri):
1434 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1435 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
1437 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1439 d.addCallback(made_subsub)
1441 d.addCallback(mkdir_p)
1444 def test_PUT_NEWDIRURL_mkdirs(self):
# t=mkdir with an intermediate path creates 'subdir' automatically and
# places 'newdir' inside it, not directly under foo.
1445 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1446 d.addCallback(lambda res:
1447 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1448 d.addCallback(lambda res:
1449 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1450 d.addCallback(lambda res:
1451 self._foo_node.get_child_at_path(u"subdir/newdir"))
1452 d.addCallback(self.failUnlessNodeKeysAre, [])
1455 def test_DELETE_DIRURL(self):
# DELETE on a directory URL unlinks it from its parent.
1456 d = self.DELETE(self.public_url + "/foo")
1457 d.addCallback(lambda res:
1458 self.failIfNodeHasChild(self.public_root, u"foo"))
1461 def test_DELETE_DIRURL_missing(self):
# DELETE of a nonexistent child 404s and leaves the parent alone.
1462 d = self.DELETE(self.public_url + "/foo/missing")
1463 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1464 d.addCallback(lambda res:
1465 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1468 def test_DELETE_DIRURL_missing2(self):
# DELETE of a nonexistent top-level public child returns 404.
1469 d = self.DELETE(self.public_url + "/missing")
1470 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
1473 def dump_root(self):
# debugging helper (not a test): walk the public root tree, calling
# visitor() on every child node.
1475 w = webish.DirnodeWalkerMixin()
1476 def visitor(childpath, childnode, metadata):
1478 d = w.walk(self.public_root, visitor)
1481 def failUnlessNodeKeysAre(self, node, expected_keys):
# assert that dirnode `node` has exactly `expected_keys` (a list of
# unicode child names) as its children, in any order.
1482 for k in expected_keys:
1483 assert isinstance(k, unicode)
1485 def _check(children):
1486 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1487 d.addCallback(_check)
1489 def failUnlessNodeHasChild(self, node, name):
# assert that dirnode `node` has a child named `name` (unicode).
1490 assert isinstance(name, unicode)
1492 def _check(children):
1493 self.failUnless(name in children)
1494 d.addCallback(_check)
1496 def failIfNodeHasChild(self, node, name):
# assert that dirnode `node` has NO child named `name` (unicode).
1497 assert isinstance(name, unicode)
1499 def _check(children):
1500 self.failIf(name in children)
1501 d.addCallback(_check)
1504 def failUnlessChildContentsAre(self, node, name, expected_contents):
# download the (immutable) child at path `name` and compare its bytes
# against `expected_contents`.
1505 assert isinstance(name, unicode)
1506 d = node.get_child_at_path(name)
1507 d.addCallback(lambda node: download_to_data(node))
1508 def _check(contents):
1509 self.failUnlessReallyEqual(contents, expected_contents)
1510 d.addCallback(_check)
1513 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
# download the best version of the *mutable* child at path `name` and
# compare its bytes against `expected_contents`.
1514 assert isinstance(name, unicode)
1515 d = node.get_child_at_path(name)
1516 d.addCallback(lambda node: node.download_best_version())
1517 def _check(contents):
1518 self.failUnlessReallyEqual(contents, expected_contents)
1519 d.addCallback(_check)
1522 def failUnlessRWChildURIIs(self, node, name, expected_uri):
# assert the child at `name` is writeable (or unknown) and that its
# overall URI, write URI, and derived read-only URI match
# `expected_uri`.
1523 assert isinstance(name, unicode)
1524 d = node.get_child_at_path(name)
1526 self.failUnless(child.is_unknown() or not child.is_readonly())
1527 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1528 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1529 expected_ro_uri = self._make_readonly(expected_uri)
1531 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1532 d.addCallback(_check)
1535 def failUnlessROChildURIIs(self, node, name, expected_uri):
# assert the child at `name` is read-only (or unknown): no write URI,
# and both get_uri and get_readonly_uri equal `expected_uri`.
1536 assert isinstance(name, unicode)
1537 d = node.get_child_at_path(name)
1539 self.failUnless(child.is_unknown() or child.is_readonly())
1540 self.failUnlessReallyEqual(child.get_write_uri(), None)
1541 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1542 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1543 d.addCallback(_check)
1546 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
# like failUnlessRWChildURIIs, but with the argument order suited to
# addCallback: the URI received from the web API comes first.
1547 assert isinstance(name, unicode)
1548 d = node.get_child_at_path(name)
1550 self.failUnless(child.is_unknown() or not child.is_readonly())
1551 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1552 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1553 expected_ro_uri = self._make_readonly(got_uri)
1555 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1556 d.addCallback(_check)
1559 def failUnlessURIMatchesROChild(self, got_uri, node, name):
# like failUnlessROChildURIIs, but with the argument order suited to
# addCallback: the URI received from the web API comes first.
1560 assert isinstance(name, unicode)
1561 d = node.get_child_at_path(name)
1563 self.failUnless(child.is_unknown() or child.is_readonly())
1564 self.failUnlessReallyEqual(child.get_write_uri(), None)
1565 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1566 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1567 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake grid stores exactly `contents` under `got_uri`."""
    stored = FakeCHKFileNode.all_contents[got_uri]
    self.failUnless(stored == contents)
1573 def test_POST_upload(self):
# form-style t=upload into a directory links the new immutable file
# and stores the uploaded bytes.
1574 d = self.POST(self.public_url + "/foo", t="upload",
1575 file=("new.txt", self.NEWFILE_CONTENTS))
1577 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1578 d.addCallback(lambda res:
1579 self.failUnlessChildContentsAre(fn, u"new.txt",
1580 self.NEWFILE_CONTENTS))
1583 def test_POST_upload_unicode(self):
# an upload under a non-ASCII filename must be linked under that name
# and retrievable via the UTF-8-encoded URL.
1584 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1585 d = self.POST(self.public_url + "/foo", t="upload",
1586 file=(filename, self.NEWFILE_CONTENTS))
1588 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1589 d.addCallback(lambda res:
1590 self.failUnlessChildContentsAre(fn, filename,
1591 self.NEWFILE_CONTENTS))
1592 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1593 d.addCallback(lambda res: self.GET(target_url))
1594 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1595 self.NEWFILE_CONTENTS,
1599 def test_POST_upload_unicode_named(self):
# the child is checked under `filename` even though the file part is
# named "overridden" -- a name= override field (on a line lost in
# extraction) presumably supplies the real name; verify against VCS.
1600 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1601 d = self.POST(self.public_url + "/foo", t="upload",
1603 file=("overridden", self.NEWFILE_CONTENTS))
1605 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1606 d.addCallback(lambda res:
1607 self.failUnlessChildContentsAre(fn, filename,
1608 self.NEWFILE_CONTENTS))
1609 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1610 d.addCallback(lambda res: self.GET(target_url))
1611 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1612 self.NEWFILE_CONTENTS,
1616 def test_POST_upload_no_link(self):
# t=upload to /uri (no target directory) returns an upload-results
# page; the URI it reports must resolve to the uploaded contents.
1617 d = self.POST("/uri", t="upload",
1618 file=("new.txt", self.NEWFILE_CONTENTS))
1619 def _check_upload_results(page):
1620 # this should be a page which describes the results of the upload
1621 # that just finished.
1622 self.failUnless("Upload Results:" in page)
1623 self.failUnless("URI:" in page)
1624 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1625 mo = uri_re.search(page)
1626 self.failUnless(mo, page)
1627 new_uri = mo.group(1)
1629 d.addCallback(_check_upload_results)
1630 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1633 def test_POST_upload_no_link_whendone(self):
# when_done=/ must redirect to '/' after the unlinked upload.
1634 d = self.POST("/uri", t="upload", when_done="/",
1635 file=("new.txt", self.NEWFILE_CONTENTS))
1636 d.addBoth(self.shouldRedirect, "/")
1639 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
# run `callable`, expect it to errback with PageRedirect, and hand
# the redirect's status code and Location target to `checker`; a
# normal (non-redirect) result is a test failure.
1640 d = defer.maybeDeferred(callable, *args, **kwargs)
1642 if isinstance(res, failure.Failure):
1643 res.trap(error.PageRedirect)
1644 statuscode = res.value.status
1645 target = res.value.location
1646 return checker(statuscode, target)
1647 self.fail("%s: callable was supposed to redirect, not return '%s'"
1652 def test_POST_upload_no_link_whendone_results(self):
# when_done=/uri/%(uri)s: the 302 redirect target interpolates the
# new file's URI, and GETting that target returns the uploaded bytes.
1653 def check(statuscode, target):
1654 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1655 self.failUnless(target.startswith(self.webish_url), target)
1656 return client.getPage(target, method="GET")
1657 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1659 self.POST, "/uri", t="upload",
1660 when_done="/uri/%(uri)s",
1661 file=("new.txt", self.NEWFILE_CONTENTS))
1662 d.addCallback(lambda res:
1663 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))
1666 def test_POST_upload_no_link_mutable(self):
# unlinked mutable upload: the response is a URI:SSK: write-cap, and
# the contents must be retrievable via the node itself, /uri/CAP,
# and /file/CAP.
1667 d = self.POST("/uri", t="upload", mutable="true",
1668 file=("new.txt", self.NEWFILE_CONTENTS))
1669 def _check(filecap):
1670 filecap = filecap.strip()
1671 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1672 self.filecap = filecap
1673 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1674 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1675 n = self.s.create_node_from_uri(filecap)
1676 return n.download_best_version()
1677 d.addCallback(_check)
1679 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1680 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1681 d.addCallback(_check2)
1683 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1684 return self.GET("/file/%s" % urllib.quote(self.filecap))
1685 d.addCallback(_check3)
1687 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1688 d.addCallback(_check4)
1691 def test_POST_upload_no_link_mutable_toobig(self):
# an oversize unlinked mutable upload must be rejected with 413.
1692 d = self.shouldFail2(error.Error,
1693 "test_POST_upload_no_link_mutable_toobig",
1694 "413 Request Entity Too Large",
1695 "SDMF is limited to one segment, and 10001 > 10000",
1697 "/uri", t="upload", mutable="true",
1699 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1702 def test_POST_upload_mutable(self):
1703 # this creates a mutable file
# End-to-end mutable-file lifecycle through the webapi: create via
# POST t=upload mutable=true, overwrite via POST and then PUT (the
# URI must stay stable across overwrites), check the HTML and JSON
# directory listings, the file's own JSON, t=uri / t=readonly-uri,
# /uri/CAP retrieval, HEAD size/type headers, and the 413 oversize-
# overwrite error.
1704 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1705 file=("new.txt", self.NEWFILE_CONTENTS))
1707 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1708 d.addCallback(lambda res:
1709 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1710 self.NEWFILE_CONTENTS))
1711 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1713 self.failUnless(IMutableFileNode.providedBy(newnode))
1714 self.failUnless(newnode.is_mutable())
1715 self.failIf(newnode.is_readonly())
1716 self._mutable_node = newnode
1717 self._mutable_uri = newnode.get_uri()
1720 # now upload it again and make sure that the URI doesn't change
1721 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1722 d.addCallback(lambda res:
1723 self.POST(self.public_url + "/foo", t="upload",
1725 file=("new.txt", NEWER_CONTENTS)))
1726 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1727 d.addCallback(lambda res:
1728 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1730 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1732 self.failUnless(IMutableFileNode.providedBy(newnode))
1733 self.failUnless(newnode.is_mutable())
1734 self.failIf(newnode.is_readonly())
1735 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1736 d.addCallback(_got2)
1738 # upload a second time, using PUT instead of POST
1739 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1740 d.addCallback(lambda res:
1741 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1742 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1743 d.addCallback(lambda res:
1744 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1747 # finally list the directory, since mutable files are displayed
1748 # slightly differently
1750 d.addCallback(lambda res:
1751 self.GET(self.public_url + "/foo/",
1752 followRedirect=True))
1753 def _check_page(res):
1754 # TODO: assert more about the contents
1755 self.failUnless("SSK" in res)
1757 d.addCallback(_check_page)
1759 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
1761 self.failUnless(IMutableFileNode.providedBy(newnode))
1762 self.failUnless(newnode.is_mutable())
1763 self.failIf(newnode.is_readonly())
1764 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1765 d.addCallback(_got3)
1767 # look at the JSON form of the enclosing directory
1768 d.addCallback(lambda res:
1769 self.GET(self.public_url + "/foo/?t=json",
1770 followRedirect=True))
1771 def _check_page_json(res):
1772 parsed = simplejson.loads(res)
1773 self.failUnlessEqual(parsed[0], "dirnode")
1774 children = dict( [(unicode(name),value)
1776 in parsed[1]["children"].iteritems()] )
1777 self.failUnless(u"new.txt" in children)
1778 new_json = children[u"new.txt"]
1779 self.failUnlessEqual(new_json[0], "filenode")
1780 self.failUnless(new_json[1]["mutable"])
1781 self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
1782 ro_uri = self._mutable_node.get_readonly().to_string()
1783 self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
1784 d.addCallback(_check_page_json)
1786 # and the JSON form of the file
1787 d.addCallback(lambda res:
1788 self.GET(self.public_url + "/foo/new.txt?t=json"))
1789 def _check_file_json(res):
1790 parsed = simplejson.loads(res)
1791 self.failUnlessEqual(parsed[0], "filenode")
1792 self.failUnless(parsed[1]["mutable"])
1793 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
1794 ro_uri = self._mutable_node.get_readonly().to_string()
1795 self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
1796 d.addCallback(_check_file_json)
1798 # and look at t=uri and t=readonly-uri
1799 d.addCallback(lambda res:
1800 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1801 d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
1802 d.addCallback(lambda res:
1803 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1804 def _check_ro_uri(res):
1805 ro_uri = self._mutable_node.get_readonly().to_string()
1806 self.failUnlessReallyEqual(res, ro_uri)
1807 d.addCallback(_check_ro_uri)
1809 # make sure we can get to it from /uri/URI
1810 d.addCallback(lambda res:
1811 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1812 d.addCallback(lambda res:
1813 self.failUnlessReallyEqual(res, NEW2_CONTENTS))
1815 # and that HEAD computes the size correctly
1816 d.addCallback(lambda res:
1817 self.HEAD(self.public_url + "/foo/new.txt",
1818 return_response=True))
1819 def _got_headers((res, status, headers)):
1820 self.failUnlessReallyEqual(res, "")
1821 self.failUnlessReallyEqual(headers["content-length"][0],
1822 str(len(NEW2_CONTENTS)))
1823 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
1824 d.addCallback(_got_headers)
1826 # make sure that size errors are displayed correctly for overwrite
1827 d.addCallback(lambda res:
1828 self.shouldFail2(error.Error,
1829 "test_POST_upload_mutable-toobig",
1830 "413 Request Entity Too Large",
1831 "SDMF is limited to one segment, and 10001 > 10000",
1833 self.public_url + "/foo", t="upload",
1836 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1839 d.addErrback(self.dump_error)
def test_POST_upload_mutable_toobig(self):
    """An SDMF mutable upload bigger than MUTABLE_SIZELIMIT must get 413."""
    # [review] chunk gaps: the 'self.POST,' callable argument, the
    # file=("new.txt", ...) argument line, and the trailing 'return d'
    # appear to be on lines missing from this chunk — confirm upstream.
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.public_url + "/foo",
                         t="upload", mutable="true",
                         "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
def dump_error(self, f):
    """Errback helper: print the HTTP body attached to a web error.

    [review] a chunk gap hides at least one line here (likely a separator
    print and a 'return f' that re-propagates the Failure) — confirm.
    """
    # if the web server returns an error code (like 400 Bad Request),
    # web.client.getPage puts the HTTP response body into the .response
    # attribute of the exception object that it gives back. It does not
    # appear in the Failure's repr(), so the ERROR that trial displays
    # will be rather terse and unhelpful. addErrback this method to the
    # end of your chain to get more information out of these errors.
    if f.check(error.Error):
        print "web.error.Error:"
        print f.value.response
def test_POST_upload_replace(self):
    """POST t=upload over an existing name replaces the child by default."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    # [review] 'fn' is bound on a line missing from this chunk
    # (presumably fn = self._foo_node); 'return d' is also missing.
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_no_replace_ok(self):
    """?replace=false succeeds when the target name is not yet taken."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessReallyEqual(res,
                                                         self.NEWFILE_CONTENTS))
    # [review] chunk gap: 'return d' likely missing here
def test_POST_upload_no_replace_queryarg(self):
    """?replace=false must refuse to overwrite an existing child."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error,
              "POST_upload_no_replace_queryarg",
              # [review] chunk gap: expected status line (probably
              # "409 Conflict") missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_upload_no_replace_field(self):
    """The form-field variant replace=false must also refuse to overwrite."""
    d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
def test_POST_upload_whendone(self):
    """when_done=/THERE redirects there after the upload finishes."""
    d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/THERE")
    # [review] 'fn' is bound on a line missing from this chunk; 'return d'
    # also appears to be missing.
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_named(self):
    """t=upload with an explicit name= field links the file under it."""
    # [review] 'fn' is bound on a line missing from this chunk; 'return d'
    # also appears to be missing.
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="new.txt", file=self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
def test_POST_upload_named_badfilename(self):
    """name= containing a slash is rejected and no child gets linked."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_upload_named_badfilename",
              # [review] chunk gap: expected status line missing here
              "name= may not contain a slash",
              # [review] chunk gap: closing paren line missing here
    # make sure that nothing was added
    d.addCallback(lambda res:
                  self.failUnlessNodeKeysAre(self._foo_node,
                                             [u"bar.txt", u"blockingfile",
                                              u"empty", u"n\u00fc.txt",
    # [review] chunk gap: remaining key list, closing parens, 'return d'
def test_POST_FILEURL_check(self):
    """t=check on a file: healthy report, when_done redirect, return_to
    link, and JSON output."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check")
    # [review] chunk gap: 'def _check(res):' header missing here
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check",
                                       # [review] chunk gap: _check2 and
                                       # POST arguments missing here
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    # [review] chunk gap: 'def _check3(res):' header missing here
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)

    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_FILEURL_check_and_repair(self):
    """t=check&repair=true on a healthy file, plus redirect/return_to."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check", repair="true")
    # [review] chunk gap: 'def _check(res):' header missing here
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                       # [review] chunk gap: _check2 and
                                       # POST arguments missing here
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    # [review] chunk gap: 'def _check3(res):' header missing here
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_DIRURL_check(self):
    """t=check on a directory: healthy report, redirect, return_to, JSON."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check")
    # [review] chunk gap: 'def _check(res):' header missing here
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check",
                                       # [review] chunk gap: _check2 and
                                       # POST arguments missing here
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    # [review] chunk gap: 'def _check3(res):' header missing here
        self.failUnless("Healthy :" in res, res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)

    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_DIRURL_check_and_repair(self):
    """t=check&repair=true on a directory, plus redirect/return_to."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check", repair="true")
    # [review] chunk gap: 'def _check(res):' header missing here
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                       # [review] chunk gap: _check2 and
                                       # POST arguments missing here
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    # [review] chunk gap: 'def _check3(res):' header missing here
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    # [review] chunk gap: 'return d' likely missing here
def wait_for_operation(self, ignored, ophandle):
    """Poll /operations/<ophandle>?t=status until finished=true; the
    returned Deferred fires with the parsed JSON status."""
    url = "/operations/" + ophandle
    url += "?t=status&output=JSON"
    # [review] chunk gap: the GET of url and the 'def _got(res):' header
    # are missing here
        data = simplejson.loads(res)
        if not data["finished"]:
            d = self.stall(delay=1.0)
            d.addCallback(self.wait_for_operation, ophandle)
    # [review] chunk gap: returns for the finished/unfinished branches and
    # the callback wiring are missing here
def get_operation_results(self, ignored, ophandle, output=None):
    """Fetch /operations/<ophandle>; parse as JSON when output=JSON."""
    url = "/operations/" + ophandle
    # [review] chunk gap: the '?t=status' suffix and the 'if output:'
    # guard are missing here
        url += "&output=" + output
    # [review] chunk gap: the GET of url and 'def _got(res):' header are
    # missing here
        if output and output.lower() == "json":
            return simplejson.loads(res)
    # [review] chunk gap: plain-text return and callback wiring missing
def test_POST_DIRURL_deepcheck_no_ophandle(self):
    """start-deep-check without ophandle= must be rejected."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepcheck_no_ophandle",
                         # [review] chunk gap: expected status line
                         # (probably "400 Bad Request") missing here
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-check")
    # [review] chunk gap: 'return d' likely missing here
def test_POST_DIRURL_deepcheck(self):
    """start-deep-check redirects to /operations/123; poll the op and
    inspect its JSON, HTML, and per-SI detail pages."""
    def _check_redirect(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnless(target.endswith("/operations/123"))
    d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                             self.POST, self.public_url,
                             t="start-deep-check", ophandle="123")
    d.addCallback(self.wait_for_operation, "123")
    def _check_json(data):
        self.failUnlessReallyEqual(data["finished"], True)
        self.failUnlessReallyEqual(data["count-objects-checked"], 8)
        self.failUnlessReallyEqual(data["count-objects-healthy"], 8)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "123", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy: <span>8</span>" in res)
    d.addCallback(_check_html)

    d.addCallback(lambda res:
                  self.GET("/operations/123/"))
    d.addCallback(_check_html) # should be the same as without the slash

    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "one", "404 Not Found",
                                   "No detailed results for SI bogus",
                                   self.GET, "/operations/123/bogus"))

    foo_si = self._foo_node.get_storage_index()
    foo_si_s = base32.b2a(foo_si)
    d.addCallback(lambda res:
                  self.GET("/operations/123/%s?output=JSON" % foo_si_s))
    def _check_foo_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data["storage-index"], foo_si_s)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_foo_json)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_DIRURL_deepcheck_and_repair(self):
    """Deep-check+repair on an all-healthy grid: zero repairs attempted,
    counts identical before and after; verify JSON and HTML outputs."""
    d = self.POST(self.public_url, t="start-deep-check", repair="true",
                  ophandle="124", output="json", followRedirect=True)
    d.addCallback(self.wait_for_operation, "124")
    def _check_json(data):
        self.failUnlessReallyEqual(data["finished"], True)
        self.failUnlessReallyEqual(data["count-objects-checked"], 8)
        self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 8)
        self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
        self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
        self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
        self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
        self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
        self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 8)
        self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
        self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "124", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)

        self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)

        self.failUnless("Repairs Attempted: <span>0</span>" in res)
        self.failUnless("Repairs Successful: <span>0</span>" in res)
        self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)

        self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
    d.addCallback(_check_html)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_FILEURL_bad_t(self):
    """POST to a file with an unknown t= must return 400."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to file: bad t=bogus",
                         self.POST, self.public_url + "/foo/bar.txt",
                         # [review] chunk gap: the t="bogus" argument and
                         # 'return d' are missing here
def test_POST_mkdir(self): # return value?
    """t=mkdir&name=newdir creates an empty subdirectory."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_initial_children(self):
    """mkdir-with-children creates newdir pre-populated from the JSON body."""
    (newkids, caps) = self._create_initial_children()
    d = self.POST2(self.public_url +
                   "/foo?t=mkdir-with-children&name=newdir",
                   simplejson.dumps(newkids))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    # [review] chunk gap: further child checks and 'return d' missing here
def test_POST_mkdir_immutable(self):
    """t=mkdir-immutable creates a deep-immutable dir holding the given
    immutable/unknown/lit children."""
    (newkids, caps) = self._create_immutable_children()
    d = self.POST2(self.public_url +
                   "/foo?t=mkdir-immutable&name=newdir",
                   simplejson.dumps(newkids))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_immutable_bad(self):
    """mkdir-immutable with mutable children must be rejected."""
    (newkids, caps) = self._create_initial_children()
    d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
                         # [review] chunk gap: expected status line and
                         # the self.POST2/url-prefix lines are missing
                         "needed to be immutable but was not",
                         "/foo?t=mkdir-immutable&name=newdir",
                         simplejson.dumps(newkids))
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_2(self):
    """POST <dir>/newdir?t=mkdir creates the child via the URL path."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdirs_2(self):
    """t=mkdir on a deep path creates intermediate directories too."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_noredirect(self):
    """POST /uri?t=mkdir returns the new unlinked directory's writecap."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # parses (and thereby validates) the returned cap
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_noredirect2(self):
    # make sure form-based arguments (as on the welcome page) still work
    d = self.POST("/uri", t="mkdir")
    def _after_mkdir(res):
        # parses (and thereby validates) the returned cap
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    d.addErrback(self.explain_web_error)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_redirect(self):
    """redirect_to_result=true yields a 303 to uri/URI:DIR2:..."""
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_redirect2(self):
    """Form-field variant of redirect_to_result=true: same 303 redirect."""
    d = self.POST("/uri", t="mkdir", redirect_to_result="true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    d.addErrback(self.explain_web_error)
    # [review] chunk gap: 'return d' likely missing here
def _make_readonly(self, u):
    """Return the read-only form of cap string *u*."""
    ro_uri = uri.from_string(u).get_readonly()
    # [review] chunk gap: one or two lines missing here (likely a
    # 'if ro_uri is None: return None' guard for already-immutable caps)
    return ro_uri.to_string()
def _create_initial_children(self):
    """Build a mixed children spec (immutable, mutable, unknown, dirnode
    kids) plus a dict of the caps used, for mkdir-with-children tests."""
    contents, n, filecap1 = self.makefile(12)
    md1 = {"metakey1": "metavalue1"}
    filecap2 = make_mutable_file_uri()
    node3 = self.s.create_node_from_uri(make_mutable_file_uri())
    filecap3 = node3.get_readonly_uri()
    node4 = self.s.create_node_from_uri(make_mutable_file_uri())
    dircap = DirectoryNode(node4, None, None).get_uri()
    litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
    emptydircap = "URI:DIR2-LIT:"
    newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
                                           "ro_uri": self._make_readonly(filecap1),
                                           "metadata": md1, }],
               u"child-mutable": ["filenode", {"rw_uri": filecap2,
                                               "ro_uri": self._make_readonly(filecap2)}],
               u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
               u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
                                                "ro_uri": unknown_rocap}],
               u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
               u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
               u"dirchild": ["dirnode", {"rw_uri": dircap,
                                         "ro_uri": self._make_readonly(dircap)}],
               u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
               u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
    # [review] chunk gap: the closing '}' of newkids and the 'dircap'
    # entry of the caps dict are on missing lines
    return newkids, {'filecap1': filecap1,
                     'filecap2': filecap2,
                     'filecap3': filecap3,
                     'unknown_rwcap': unknown_rwcap,
                     'unknown_rocap': unknown_rocap,
                     'unknown_immcap': unknown_immcap,
                     'litdircap': litdircap,
                     'emptydircap': emptydircap}
def _create_immutable_children(self):
    """Build an all-immutable children spec (CHK file, unknown imm cap,
    imm/lit/empty dirs) plus a dict of the caps used."""
    contents, n, filecap1 = self.makefile(12)
    md1 = {"metakey1": "metavalue1"}
    tnode = create_chk_filenode("immutable directory contents\n"*10)
    dnode = DirectoryNode(tnode, None, None)
    assert not dnode.is_mutable()
    immdircap = dnode.get_uri()
    litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
    emptydircap = "URI:DIR2-LIT:"
    newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
                                           "metadata": md1, }],
               u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
               u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
               u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
               u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
    # [review] chunk gap: the closing '}' of newkids is on a missing line
    return newkids, {'filecap1': filecap1,
                     'unknown_immcap': unknown_immcap,
                     'immdircap': immdircap,
                     'litdircap': litdircap,
                     'emptydircap': emptydircap}
def test_POST_mkdir_no_parentdir_initial_children(self):
    """POST /uri?t=mkdir-with-children builds an unlinked dir from JSON."""
    (newkids, caps) = self._create_initial_children()
    d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
    def _after_mkdir(res):
        self.failUnless(res.startswith("URI:DIR"), res)
        n = self.s.create_node_from_uri(res)
        d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"child-imm",
                       # [review] chunk gap: caps[...] args missing here
        d2.addCallback(lambda ign:
                       self.failUnlessRWChildURIIs(n, u"child-mutable",
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"child-mutable-ro",
        d2.addCallback(lambda ign:
                       self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
                                                   caps['unknown_rwcap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"unknownchild-ro",
                                                   caps['unknown_rocap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"unknownchild-imm",
                                                   caps['unknown_immcap']))
        d2.addCallback(lambda ign:
                       self.failUnlessRWChildURIIs(n, u"dirchild",
        # [review] chunk gap: caps['dircap'] arg and 'return d2' missing
    d.addCallback(_after_mkdir)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_unexpected_children(self):
    # the regular /uri?t=mkdir operation is specified to ignore its body.
    # Only t=mkdir-with-children pays attention to it.
    (newkids, caps) = self._create_initial_children()
    d = self.shouldHTTPError("POST t=mkdir unexpected children",
                             # [review] chunk gap: status code/message
                             # arguments missing here
                             "t=mkdir does not accept children=, "
                             "try t=mkdir-with-children instead",
                             self.POST2, "/uri?t=mkdir", # without children
                             simplejson.dumps(newkids))
    # [review] chunk gap: 'return d' likely missing here
def test_POST_noparent_bad(self):
    """POST /uri with an unsupported t= must return 400."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_immutable(self):
    """POST /uri?t=mkdir-immutable with all-immutable kids succeeds."""
    (newkids, caps) = self._create_immutable_children()
    d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
    def _after_mkdir(res):
        self.failUnless(res.startswith("URI:DIR"), res)
        n = self.s.create_node_from_uri(res)
        d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"child-imm",
                       # [review] chunk gap: caps[...] args missing here
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"unknownchild-imm",
                                                   caps['unknown_immcap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"dirchild-imm",
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"dirchild-lit",
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"dirchild-empty",
                                                   caps['emptydircap']))
        # [review] chunk gap: remaining caps args and 'return d2' missing
    d.addCallback(_after_mkdir)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_parentdir_immutable_bad(self):
    """mkdir-immutable at /uri with mutable children must be rejected."""
    (newkids, caps) = self._create_initial_children()
    d = self.shouldFail2(error.Error,
                         "test_POST_mkdir_no_parentdir_immutable_bad",
                         # [review] chunk gap: status line and the
                         # self.POST2 callable lines are missing here
                         "needed to be immutable but was not",
                         "/uri?t=mkdir-immutable",
                         simplejson.dumps(newkids))
    # [review] chunk gap: 'return d' likely missing here
def test_welcome_page_mkdir_button(self):
    """Drive the welcome page's 'Create a directory' form end-to-end."""
    # Fetch the welcome page.
    # [review] chunk gap: the 'd = self.GET("/")' line is missing here
    def _after_get_welcome_page(res):
        MKDIR_BUTTON_RE = re.compile(
            '<form action="([^"]*)" method="post".*?'
            '<input type="hidden" name="t" value="([^"]*)" />'
            '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
            '<input type="submit" value="Create a directory" />',
            # [review] chunk gap: the regex flags line is missing here
        mo = MKDIR_BUTTON_RE.search(res)
        formaction = mo.group(1)
        # [review] chunk gap: 'formt = mo.group(2)' is missing here
        formaname = mo.group(3)
        formavalue = mo.group(4)
        return (formaction, formt, formaname, formavalue)
    d.addCallback(_after_get_welcome_page)
    def _after_parse_form(res):
        (formaction, formt, formaname, formavalue) = res
        return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
    d.addCallback(_after_parse_form)
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_replace(self): # return value?
    """t=mkdir over an existing name replaces it with an empty dir."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_replace_queryarg(self): # return value?
    """?replace=false must refuse to replace an existing subdirectory."""
    d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
    d.addBoth(self.shouldFail, error.Error,
              "POST_mkdir_no_replace_queryarg",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_no_replace_field(self): # return value?
    """Form-field replace=false must refuse to replace an existing dir."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
                  # [review] chunk gap: the replace="false" argument line
                  # is missing here
    d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_whendone_field(self):
    """Form-field when_done=/THERE redirects after mkdir."""
    d = self.POST(self.public_url + "/foo",
                  t="mkdir", name="newdir", when_done="/THERE")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_mkdir_whendone_queryarg(self):
    """Query-arg when_done=/THERE redirects after mkdir."""
    d = self.POST(self.public_url + "/foo?when_done=/THERE",
                  t="mkdir", name="newdir")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    # [review] chunk gap: 'return d' likely missing here
def test_POST_bad_t(self):
    """POST to a directory with an unknown t= must return 400."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to a directory with bad t=BOGUS",
                         self.POST, self.public_url + "/foo", t="BOGUS")
    # [review] chunk gap: 'return d' likely missing here
def test_POST_set_children(self, command_name="set_children"):
    """POST t=set_children atomically links several children at once.

    *command_name* lets test_POST_set_children_with_hyphen reuse this
    body for the hyphenated spelling.
    """
    contents9, n9, newuri9 = self.makefile(9)
    contents10, n10, newuri10 = self.makefile(10)
    contents11, n11, newuri11 = self.makefile(11)
    # [review] chunk gap: the 'reqbody = """{' opening and several lines
    # of the JSON body (ro_uri entries, metadata braces) are missing
        "atomic_added_1": [ "filenode", { "rw_uri": "%s",
                "ctime": 1002777696.7564139,
                "mtime": 1002777696.7564139
        "atomic_added_2": [ "filenode", { "rw_uri": "%s",
                "ctime": 1002777696.7564139,
                "mtime": 1002777696.7564139
        "atomic_added_3": [ "filenode", { "rw_uri": "%s",
                "ctime": 1002777696.7564139,
                "mtime": 1002777696.7564139
        }""" % (newuri9, newuri10, newuri11)
    url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
    d = client.getPage(url, method="POST", postdata=reqbody)
    # [review] chunk gap: the 'def _then(res):' header is missing here
        self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
        self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
        self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
    d.addCallback(_then)
    d.addErrback(self.dump_error)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_set_children_with_hyphen(self):
    """The hyphenated spelling t=set-children must behave identically."""
    d = self.test_POST_set_children(command_name="set-children")
    return d
def test_POST_link_uri(self):
    """t=uri links an existing cap under a new name."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                  # [review] chunk gap: 'contents))' and 'return d' missing
def test_POST_link_uri_replace(self):
    """t=uri over an existing name replaces the child by default."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                  # [review] chunk gap: 'contents))' and 'return d' missing
def test_POST_link_uri_unknown_bad(self):
    """Linking an unknown (from-the-future) writecap must be refused."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
    d.addBoth(self.shouldFail, error.Error,
              "POST_link_uri_unknown_bad",
              # [review] chunk gap: expected status line missing here
              "unknown cap in a write slot")
    # [review] chunk gap: 'return d' likely missing here
def test_POST_link_uri_unknown_ro_good(self):
    """An unknown cap is acceptable in a read-only slot."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
    # [review] chunk gap: 'return d' likely missing here
def test_POST_link_uri_unknown_imm_good(self):
    """An unknown immutable cap is acceptable in a read-only slot."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
    # [review] chunk gap: 'return d' likely missing here
def test_POST_link_uri_no_replace_queryarg(self):
    """t=uri with ?replace=false must refuse to overwrite."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo?replace=false", t="uri",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_link_uri_no_replace_queryarg",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_link_uri_no_replace_field(self):
    """t=uri with form-field replace=false must refuse to overwrite."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", replace="false",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_link_uri_no_replace_field",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_delete(self):
    """t=delete removes the named child from the directory."""
    d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
    d.addCallback(lambda res: self._foo_node.list())
    def _check(children):
        self.failIf(u"bar.txt" in children)
    d.addCallback(_check)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_rename_file(self):
    """t=rename moves a child to a new name; contents are unchanged."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='wibble.txt')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_rename_file_redundant(self):
    """Renaming a child onto its own name is a no-op, not an error."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='bar.txt')
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_rename_file_replace(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"empty"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_rename_file_no_replace_queryarg(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo?replace=false", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_queryarg",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
    # [review] chunk gap: 'return d' likely missing here
def test_POST_rename_file_no_replace_field(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo", t="rename", replace="false",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_field",
              # [review] chunk gap: expected status line missing here
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
    # [review] chunk gap: 'return d' likely missing here
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the JSON encoding of an empty dirnode."""
    parsed = simplejson.loads(res)
    kind, info = parsed[0], parsed[1]
    self.failUnlessEqual(kind, "dirnode", parsed)
    self.failUnlessReallyEqual(len(info["children"]), 0)
def test_POST_rename_file_slash_fail(self):
    """to_name= containing a slash is rejected; the child stays put."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='kirk/spock.txt')
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_rename_file_slash_fail",
              # [review] chunk gap: expected status line missing here
              "to_name= may not contain a slash",
              # [review] chunk gap: closing paren line missing here
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    # [review] chunk gap: 'return d' likely missing here
2701 def test_POST_rename_dir(self):
2702 d = self.POST(self.public_url, t="rename",
2703 from_name="foo", to_name='plunk')
2704 d.addCallback(lambda res:
2705 self.failIfNodeHasChild(self.public_root, u"foo"))
2706 d.addCallback(lambda res:
2707 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2708 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2709 d.addCallback(self.failUnlessIsFooJSON)
2712 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2713 """ If target is not None then the redirection has to go to target. If
2714 statuscode is not None then the redirection has to be accomplished with
2715 that HTTP status code."""
2716 if not isinstance(res, failure.Failure):
2717 to_where = (target is None) and "somewhere" or ("to " + target)
2718 self.fail("%s: we were expecting to get redirected %s, not get an"
2719 " actual page: %s" % (which, to_where, res))
2720 res.trap(error.PageRedirect)
2721 if statuscode is not None:
2722 self.failUnlessReallyEqual(res.value.status, statuscode,
2723 "%s: not a redirect" % which)
2724 if target is not None:
2725 # the PageRedirect does not seem to capture the uri= query arg
2726 # properly, so we can't check for it.
2727 realtarget = self.webish_url + target
2728 self.failUnlessReallyEqual(res.value.location, realtarget,
2729 "%s: wrong target" % which)
2730 return res.value.location
2732 def test_GET_URI_form(self):
2733 base = "/uri?uri=%s" % self._bar_txt_uri
2734 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2735 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2737 d.addBoth(self.shouldRedirect, targetbase)
2738 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2739 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2740 d.addCallback(lambda res: self.GET(base+"&t=json"))
2741 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2742 d.addCallback(self.log, "about to get file by uri")
2743 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2744 d.addCallback(self.failUnlessIsBarDotTxt)
2745 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2746 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2747 followRedirect=True))
2748 d.addCallback(self.failUnlessIsFooJSON)
2749 d.addCallback(self.log, "got dir by uri")
2753 def test_GET_URI_form_bad(self):
2754 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2755 "400 Bad Request", "GET /uri requires uri=",
2759 def test_GET_rename_form(self):
2760 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2761 followRedirect=True)
2763 self.failUnless('name="when_done" value="."' in res, res)
2764 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2765 d.addCallback(_check)
2768 def log(self, res, msg):
2769 #print "MSG: %s RES: %s" % (msg, res)
2773 def test_GET_URI_URL(self):
2774 base = "/uri/%s" % self._bar_txt_uri
2776 d.addCallback(self.failUnlessIsBarDotTxt)
2777 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2778 d.addCallback(self.failUnlessIsBarDotTxt)
2779 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2780 d.addCallback(self.failUnlessIsBarDotTxt)
2783 def test_GET_URI_URL_dir(self):
2784 base = "/uri/%s?t=json" % self._foo_uri
2786 d.addCallback(self.failUnlessIsFooJSON)
2789 def test_GET_URI_URL_missing(self):
2790 base = "/uri/%s" % self._bad_file_uri
2791 d = self.shouldHTTPError("test_GET_URI_URL_missing",
2792 http.GONE, None, "NotEnoughSharesError",
2794 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2795 # here? we must arrange for a download to fail after target.open()
2796 # has been called, and then inspect the response to see that it is
2797 # shorter than we expected.
2800 def test_PUT_DIRURL_uri(self):
2801 d = self.s.create_dirnode()
2803 new_uri = dn.get_uri()
2804 # replace /foo with a new (empty) directory
2805 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2806 d.addCallback(lambda res:
2807 self.failUnlessReallyEqual(res.strip(), new_uri))
2808 d.addCallback(lambda res:
2809 self.failUnlessRWChildURIIs(self.public_root,
2813 d.addCallback(_made_dir)
2816 def test_PUT_DIRURL_uri_noreplace(self):
2817 d = self.s.create_dirnode()
2819 new_uri = dn.get_uri()
2820 # replace /foo with a new (empty) directory, but ask that
2821 # replace=false, so it should fail
2822 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2823 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2825 self.public_url + "/foo?t=uri&replace=false",
2827 d.addCallback(lambda res:
2828 self.failUnlessRWChildURIIs(self.public_root,
2832 d.addCallback(_made_dir)
2835 def test_PUT_DIRURL_bad_t(self):
2836 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2837 "400 Bad Request", "PUT to a directory",
2838 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2839 d.addCallback(lambda res:
2840 self.failUnlessRWChildURIIs(self.public_root,
2845 def test_PUT_NEWFILEURL_uri(self):
2846 contents, n, new_uri = self.makefile(8)
2847 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2848 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
2849 d.addCallback(lambda res:
2850 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2854 def test_PUT_NEWFILEURL_uri_replace(self):
2855 contents, n, new_uri = self.makefile(8)
2856 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2857 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
2858 d.addCallback(lambda res:
2859 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2863 def test_PUT_NEWFILEURL_uri_no_replace(self):
2864 contents, n, new_uri = self.makefile(8)
2865 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2866 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2868 "There was already a child by that name, and you asked me "
2869 "to not replace it")
2872 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
2873 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
2874 d.addBoth(self.shouldFail, error.Error,
2875 "POST_put_uri_unknown_bad",
2877 "unknown cap in a write slot")
2880 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
2881 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
2882 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2883 u"put-future-ro.txt")
2886 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
2887 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
2888 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
2889 u"put-future-imm.txt")
2892 def test_PUT_NEWFILE_URI(self):
2893 file_contents = "New file contents here\n"
2894 d = self.PUT("/uri", file_contents)
2896 assert isinstance(uri, str), uri
2897 self.failUnless(uri in FakeCHKFileNode.all_contents)
2898 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
2900 return self.GET("/uri/%s" % uri)
2901 d.addCallback(_check)
2903 self.failUnlessReallyEqual(res, file_contents)
2904 d.addCallback(_check2)
2907 def test_PUT_NEWFILE_URI_not_mutable(self):
2908 file_contents = "New file contents here\n"
2909 d = self.PUT("/uri?mutable=false", file_contents)
2911 assert isinstance(uri, str), uri
2912 self.failUnless(uri in FakeCHKFileNode.all_contents)
2913 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
2915 return self.GET("/uri/%s" % uri)
2916 d.addCallback(_check)
2918 self.failUnlessReallyEqual(res, file_contents)
2919 d.addCallback(_check2)
2922 def test_PUT_NEWFILE_URI_only_PUT(self):
2923 d = self.PUT("/uri?t=bogus", "")
2924 d.addBoth(self.shouldFail, error.Error,
2925 "PUT_NEWFILE_URI_only_PUT",
2927 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2930 def test_PUT_NEWFILE_URI_mutable(self):
2931 file_contents = "New file contents here\n"
2932 d = self.PUT("/uri?mutable=true", file_contents)
2933 def _check1(filecap):
2934 filecap = filecap.strip()
2935 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2936 self.filecap = filecap
2937 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2938 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
2939 n = self.s.create_node_from_uri(filecap)
2940 return n.download_best_version()
2941 d.addCallback(_check1)
2943 self.failUnlessReallyEqual(data, file_contents)
2944 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2945 d.addCallback(_check2)
2947 self.failUnlessReallyEqual(res, file_contents)
2948 d.addCallback(_check3)
2951 def test_PUT_mkdir(self):
2952 d = self.PUT("/uri?t=mkdir", "")
2954 n = self.s.create_node_from_uri(uri.strip())
2955 d2 = self.failUnlessNodeKeysAre(n, [])
2956 d2.addCallback(lambda res:
2957 self.GET("/uri/%s?t=json" % uri))
2959 d.addCallback(_check)
2960 d.addCallback(self.failUnlessIsEmptyJSON)
2963 def test_POST_check(self):
2964 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2966 # this returns a string form of the results, which are probably
2967 # None since we're using fake filenodes.
2968 # TODO: verify that the check actually happened, by changing
2969 # FakeCHKFileNode to count how many times .check() has been
2972 d.addCallback(_done)
2975 def test_bad_method(self):
2976 url = self.webish_url + self.public_url + "/foo/bar.txt"
2977 d = self.shouldHTTPError("test_bad_method",
2978 501, "Not Implemented",
2979 "I don't know how to treat a BOGUS request.",
2980 client.getPage, url, method="BOGUS")
2983 def test_short_url(self):
2984 url = self.webish_url + "/uri"
2985 d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
2986 "I don't know how to treat a DELETE request.",
2987 client.getPage, url, method="DELETE")
2990 def test_ophandle_bad(self):
2991 url = self.webish_url + "/operations/bogus?t=status"
2992 d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
2993 "unknown/expired handle 'bogus'",
2994 client.getPage, url)
2997 def test_ophandle_cancel(self):
2998 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2999 followRedirect=True)
3000 d.addCallback(lambda ignored:
3001 self.GET("/operations/128?t=status&output=JSON"))
3003 data = simplejson.loads(res)
3004 self.failUnless("finished" in data, res)
3005 monitor = self.ws.root.child_operations.handles["128"][0]
3006 d = self.POST("/operations/128?t=cancel&output=JSON")
3008 data = simplejson.loads(res)
3009 self.failUnless("finished" in data, res)
3010 # t=cancel causes the handle to be forgotten
3011 self.failUnless(monitor.is_cancelled())
3012 d.addCallback(_check2)
3014 d.addCallback(_check1)
3015 d.addCallback(lambda ignored:
3016 self.shouldHTTPError("test_ophandle_cancel",
3017 404, "404 Not Found",
3018 "unknown/expired handle '128'",
3020 "/operations/128?t=status&output=JSON"))
3023 def test_ophandle_retainfor(self):
3024 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
3025 followRedirect=True)
3026 d.addCallback(lambda ignored:
3027 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
3029 data = simplejson.loads(res)
3030 self.failUnless("finished" in data, res)
3031 d.addCallback(_check1)
3032 # the retain-for=0 will cause the handle to be expired very soon
3033 d.addCallback(lambda ign:
3034 self.clock.advance(2.0))
3035 d.addCallback(lambda ignored:
3036 self.shouldHTTPError("test_ophandle_retainfor",
3037 404, "404 Not Found",
3038 "unknown/expired handle '129'",
3040 "/operations/129?t=status&output=JSON"))
3043 def test_ophandle_release_after_complete(self):
3044 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
3045 followRedirect=True)
3046 d.addCallback(self.wait_for_operation, "130")
3047 d.addCallback(lambda ignored:
3048 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
3049 # the release-after-complete=true will cause the handle to be expired
3050 d.addCallback(lambda ignored:
3051 self.shouldHTTPError("test_ophandle_release_after_complete",
3052 404, "404 Not Found",
3053 "unknown/expired handle '130'",
3055 "/operations/130?t=status&output=JSON"))
3058 def test_uncollected_ophandle_expiration(self):
3059 # uncollected ophandles should expire after 4 days
3060 def _make_uncollected_ophandle(ophandle):
3061 d = self.POST(self.public_url +
3062 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3063 followRedirect=False)
3064 # When we start the operation, the webapi server will want
3065 # to redirect us to the page for the ophandle, so we get
3066 # confirmation that the operation has started. If the
3067 # manifest operation has finished by the time we get there,
3068 # following that redirect (by setting followRedirect=True
3069 # above) has the side effect of collecting the ophandle that
3070 # we've just created, which means that we can't use the
3071 # ophandle to test the uncollected timeout anymore. So,
3072 # instead, catch the 302 here and don't follow it.
3073 d.addBoth(self.should302, "uncollected_ophandle_creation")
3075 # Create an ophandle, don't collect it, then advance the clock by
3076 # 4 days - 1 second and make sure that the ophandle is still there.
3077 d = _make_uncollected_ophandle(131)
3078 d.addCallback(lambda ign:
3079 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
3080 d.addCallback(lambda ign:
3081 self.GET("/operations/131?t=status&output=JSON"))
3083 data = simplejson.loads(res)
3084 self.failUnless("finished" in data, res)
3085 d.addCallback(_check1)
3086 # Create an ophandle, don't collect it, then try to collect it
3087 # after 4 days. It should be gone.
3088 d.addCallback(lambda ign:
3089 _make_uncollected_ophandle(132))
3090 d.addCallback(lambda ign:
3091 self.clock.advance(96*60*60))
3092 d.addCallback(lambda ign:
3093 self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
3094 404, "404 Not Found",
3095 "unknown/expired handle '132'",
3097 "/operations/132?t=status&output=JSON"))
3100 def test_collected_ophandle_expiration(self):
3101 # collected ophandles should expire after 1 day
3102 def _make_collected_ophandle(ophandle):
3103 d = self.POST(self.public_url +
3104 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3105 followRedirect=True)
3106 # By following the initial redirect, we collect the ophandle
3107 # we've just created.
3109 # Create a collected ophandle, then collect it after 23 hours
3110 # and 59 seconds to make sure that it is still there.
3111 d = _make_collected_ophandle(133)
3112 d.addCallback(lambda ign:
3113 self.clock.advance((24*60*60) - 1))
3114 d.addCallback(lambda ign:
3115 self.GET("/operations/133?t=status&output=JSON"))
3117 data = simplejson.loads(res)
3118 self.failUnless("finished" in data, res)
3119 d.addCallback(_check1)
3120 # Create another uncollected ophandle, then try to collect it
3121 # after 24 hours to make sure that it is gone.
3122 d.addCallback(lambda ign:
3123 _make_collected_ophandle(134))
3124 d.addCallback(lambda ign:
3125 self.clock.advance(24*60*60))
3126 d.addCallback(lambda ign:
3127 self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
3128 404, "404 Not Found",
3129 "unknown/expired handle '134'",
3131 "/operations/134?t=status&output=JSON"))
3134 def test_incident(self):
3135 d = self.POST("/report_incident", details="eek")
3137 self.failUnless("Thank you for your report!" in res, res)
3138 d.addCallback(_done)
3141 def test_static(self):
3142 webdir = os.path.join(self.staticdir, "subdir")
3143 fileutil.make_dirs(webdir)
3144 f = open(os.path.join(webdir, "hello.txt"), "wb")
3148 d = self.GET("/static/subdir/hello.txt")
3150 self.failUnlessReallyEqual(res, "hello")
3151 d.addCallback(_check)
class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    """Tests for the small helpers in allmydata.web.common and .status."""

    def test_load_file(self):
        # This will raise an exception unless a well-formed XML file is found under that name.
        common.getxmlfile('directory.xhtml').load()

    def test_parse_replace_arg(self):
        self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
        self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
        self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
                                   "only-files")  # NOTE(review): continuation restored
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessReallyEqual(common.abbreviate_time(None), "")
        self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
        self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")

    def test_compute_rate(self):
        self.failUnlessReallyEqual(common.compute_rate(None, None), None)
        self.failUnlessReallyEqual(common.compute_rate(None, 1), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, None), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, 0), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, 10), 25000.0)
        self.failUnlessReallyEqual(common.compute_rate(0, 10), 0.0)
        self.shouldFail(AssertionError, "test_compute_rate", "",
                        common.compute_rate, -100, 10)
        self.shouldFail(AssertionError, "test_compute_rate", "",
                        common.compute_rate, 100, -10)

        # NOTE(review): a line or two was elided here in the dump (likely a
        # blank line plus a comment); the sanity check below continues it.
        rate = common.compute_rate(10*1000*1000, 1)
        self.failUnlessReallyEqual(common.abbreviate_rate(rate), "10.00MBps")

    def test_abbreviate_rate(self):
        self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
        self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessReallyEqual(common.abbreviate_size(None), "")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        def convert(s):   # NOTE(review): def header restored; elided in dump
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessReallyEqual(convert(0), "0 seconds")
        self.failUnlessReallyEqual(convert(1), "1 second")
        self.failUnlessReallyEqual(convert(2), "2 seconds")
        def convert2(s):  # NOTE(review): def header restored; elided in dump
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessReallyEqual(convert2([]), "has shares: ")
        self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
        self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
3218 class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
3220 def CHECK(self, ign, which, args, clientnum=0):
3221 fileurl = self.fileurls[which]
3222 url = fileurl + "?" + args
3223 return self.GET(url, method="POST", clientnum=clientnum)
3225 def test_filecheck(self):
3226 self.basedir = "web/Grid/filecheck"
3228 c0 = self.g.clients[0]
3231 d = c0.upload(upload.Data(DATA, convergence=""))
3232 def _stash_uri(ur, which):
3233 self.uris[which] = ur.uri
3234 d.addCallback(_stash_uri, "good")
3235 d.addCallback(lambda ign:
3236 c0.upload(upload.Data(DATA+"1", convergence="")))
3237 d.addCallback(_stash_uri, "sick")
3238 d.addCallback(lambda ign:
3239 c0.upload(upload.Data(DATA+"2", convergence="")))
3240 d.addCallback(_stash_uri, "dead")
3241 def _stash_mutable_uri(n, which):
3242 self.uris[which] = n.get_uri()
3243 assert isinstance(self.uris[which], str)
3244 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3245 d.addCallback(_stash_mutable_uri, "corrupt")
3246 d.addCallback(lambda ign:
3247 c0.upload(upload.Data("literal", convergence="")))
3248 d.addCallback(_stash_uri, "small")
3249 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3250 d.addCallback(_stash_mutable_uri, "smalldir")
3252 def _compute_fileurls(ignored):
3254 for which in self.uris:
3255 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3256 d.addCallback(_compute_fileurls)
3258 def _clobber_shares(ignored):
3259 good_shares = self.find_uri_shares(self.uris["good"])
3260 self.failUnlessReallyEqual(len(good_shares), 10)
3261 sick_shares = self.find_uri_shares(self.uris["sick"])
3262 os.unlink(sick_shares[0][2])
3263 dead_shares = self.find_uri_shares(self.uris["dead"])
3264 for i in range(1, 10):
3265 os.unlink(dead_shares[i][2])
3266 c_shares = self.find_uri_shares(self.uris["corrupt"])
3267 cso = CorruptShareOptions()
3268 cso.stdout = StringIO()
3269 cso.parseOptions([c_shares[0][2]])
3271 d.addCallback(_clobber_shares)
3273 d.addCallback(self.CHECK, "good", "t=check")
3274 def _got_html_good(res):
3275 self.failUnless("Healthy" in res, res)
3276 self.failIf("Not Healthy" in res, res)
3277 d.addCallback(_got_html_good)
3278 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
3279 def _got_html_good_return_to(res):
3280 self.failUnless("Healthy" in res, res)
3281 self.failIf("Not Healthy" in res, res)
3282 self.failUnless('<a href="somewhere">Return to file'
3284 d.addCallback(_got_html_good_return_to)
3285 d.addCallback(self.CHECK, "good", "t=check&output=json")
3286 def _got_json_good(res):
3287 r = simplejson.loads(res)
3288 self.failUnlessEqual(r["summary"], "Healthy")
3289 self.failUnless(r["results"]["healthy"])
3290 self.failIf(r["results"]["needs-rebalancing"])
3291 self.failUnless(r["results"]["recoverable"])
3292 d.addCallback(_got_json_good)
3294 d.addCallback(self.CHECK, "small", "t=check")
3295 def _got_html_small(res):
3296 self.failUnless("Literal files are always healthy" in res, res)
3297 self.failIf("Not Healthy" in res, res)
3298 d.addCallback(_got_html_small)
3299 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
3300 def _got_html_small_return_to(res):
3301 self.failUnless("Literal files are always healthy" in res, res)
3302 self.failIf("Not Healthy" in res, res)
3303 self.failUnless('<a href="somewhere">Return to file'
3305 d.addCallback(_got_html_small_return_to)
3306 d.addCallback(self.CHECK, "small", "t=check&output=json")
3307 def _got_json_small(res):
3308 r = simplejson.loads(res)
3309 self.failUnlessEqual(r["storage-index"], "")
3310 self.failUnless(r["results"]["healthy"])
3311 d.addCallback(_got_json_small)
3313 d.addCallback(self.CHECK, "smalldir", "t=check")
3314 def _got_html_smalldir(res):
3315 self.failUnless("Literal files are always healthy" in res, res)
3316 self.failIf("Not Healthy" in res, res)
3317 d.addCallback(_got_html_smalldir)
3318 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
3319 def _got_json_smalldir(res):
3320 r = simplejson.loads(res)
3321 self.failUnlessEqual(r["storage-index"], "")
3322 self.failUnless(r["results"]["healthy"])
3323 d.addCallback(_got_json_smalldir)
3325 d.addCallback(self.CHECK, "sick", "t=check")
3326 def _got_html_sick(res):
3327 self.failUnless("Not Healthy" in res, res)
3328 d.addCallback(_got_html_sick)
3329 d.addCallback(self.CHECK, "sick", "t=check&output=json")
3330 def _got_json_sick(res):
3331 r = simplejson.loads(res)
3332 self.failUnlessEqual(r["summary"],
3333 "Not Healthy: 9 shares (enc 3-of-10)")
3334 self.failIf(r["results"]["healthy"])
3335 self.failIf(r["results"]["needs-rebalancing"])
3336 self.failUnless(r["results"]["recoverable"])
3337 d.addCallback(_got_json_sick)
3339 d.addCallback(self.CHECK, "dead", "t=check")
3340 def _got_html_dead(res):
3341 self.failUnless("Not Healthy" in res, res)
3342 d.addCallback(_got_html_dead)
3343 d.addCallback(self.CHECK, "dead", "t=check&output=json")
3344 def _got_json_dead(res):
3345 r = simplejson.loads(res)
3346 self.failUnlessEqual(r["summary"],
3347 "Not Healthy: 1 shares (enc 3-of-10)")
3348 self.failIf(r["results"]["healthy"])
3349 self.failIf(r["results"]["needs-rebalancing"])
3350 self.failIf(r["results"]["recoverable"])
3351 d.addCallback(_got_json_dead)
3353 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
3354 def _got_html_corrupt(res):
3355 self.failUnless("Not Healthy! : Unhealthy" in res, res)
3356 d.addCallback(_got_html_corrupt)
3357 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
3358 def _got_json_corrupt(res):
3359 r = simplejson.loads(res)
3360 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
3362 self.failIf(r["results"]["healthy"])
3363 self.failUnless(r["results"]["recoverable"])
3364 self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
3365 self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
3366 d.addCallback(_got_json_corrupt)
3368 d.addErrback(self.explain_web_error)
3371 def test_repair_html(self):
3372 self.basedir = "web/Grid/repair_html"
3374 c0 = self.g.clients[0]
3377 d = c0.upload(upload.Data(DATA, convergence=""))
3378 def _stash_uri(ur, which):
3379 self.uris[which] = ur.uri
3380 d.addCallback(_stash_uri, "good")
3381 d.addCallback(lambda ign:
3382 c0.upload(upload.Data(DATA+"1", convergence="")))
3383 d.addCallback(_stash_uri, "sick")
3384 d.addCallback(lambda ign:
3385 c0.upload(upload.Data(DATA+"2", convergence="")))
3386 d.addCallback(_stash_uri, "dead")
3387 def _stash_mutable_uri(n, which):
3388 self.uris[which] = n.get_uri()
3389 assert isinstance(self.uris[which], str)
3390 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
3391 d.addCallback(_stash_mutable_uri, "corrupt")
3393 def _compute_fileurls(ignored):
3395 for which in self.uris:
3396 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3397 d.addCallback(_compute_fileurls)
3399 def _clobber_shares(ignored):
3400 good_shares = self.find_uri_shares(self.uris["good"])
3401 self.failUnlessReallyEqual(len(good_shares), 10)
3402 sick_shares = self.find_uri_shares(self.uris["sick"])
3403 os.unlink(sick_shares[0][2])
3404 dead_shares = self.find_uri_shares(self.uris["dead"])
3405 for i in range(1, 10):
3406 os.unlink(dead_shares[i][2])
3407 c_shares = self.find_uri_shares(self.uris["corrupt"])
3408 cso = CorruptShareOptions()
3409 cso.stdout = StringIO()
3410 cso.parseOptions([c_shares[0][2]])
3412 d.addCallback(_clobber_shares)
3414 d.addCallback(self.CHECK, "good", "t=check&repair=true")
3415 def _got_html_good(res):
3416 self.failUnless("Healthy" in res, res)
3417 self.failIf("Not Healthy" in res, res)
3418 self.failUnless("No repair necessary" in res, res)
3419 d.addCallback(_got_html_good)
3421 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
3422 def _got_html_sick(res):
3423 self.failUnless("Healthy : healthy" in res, res)
3424 self.failIf("Not Healthy" in res, res)
3425 self.failUnless("Repair successful" in res, res)
3426 d.addCallback(_got_html_sick)
3428 # repair of a dead file will fail, of course, but it isn't yet
3429 # clear how this should be reported. Right now it shows up as
3432 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
3433 #def _got_html_dead(res):
3435 # self.failUnless("Healthy : healthy" in res, res)
3436 # self.failIf("Not Healthy" in res, res)
3437 # self.failUnless("No repair necessary" in res, res)
3438 #d.addCallback(_got_html_dead)
3440 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
3441 def _got_html_corrupt(res):
3442 self.failUnless("Healthy : Healthy" in res, res)
3443 self.failIf("Not Healthy" in res, res)
3444 self.failUnless("Repair successful" in res, res)
3445 d.addCallback(_got_html_corrupt)
3447 d.addErrback(self.explain_web_error)
3450 def test_repair_json(self):
3451 self.basedir = "web/Grid/repair_json"
3453 c0 = self.g.clients[0]
3456 d = c0.upload(upload.Data(DATA+"1", convergence=""))
3457 def _stash_uri(ur, which):
3458 self.uris[which] = ur.uri
3459 d.addCallback(_stash_uri, "sick")
3461 def _compute_fileurls(ignored):
3463 for which in self.uris:
3464 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3465 d.addCallback(_compute_fileurls)
3467 def _clobber_shares(ignored):
3468 sick_shares = self.find_uri_shares(self.uris["sick"])
3469 os.unlink(sick_shares[0][2])
3470 d.addCallback(_clobber_shares)
3472 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
3473 def _got_json_sick(res):
3474 r = simplejson.loads(res)
3475 self.failUnlessReallyEqual(r["repair-attempted"], True)
3476 self.failUnlessReallyEqual(r["repair-successful"], True)
3477 self.failUnlessEqual(r["pre-repair-results"]["summary"],
3478 "Not Healthy: 9 shares (enc 3-of-10)")
3479 self.failIf(r["pre-repair-results"]["results"]["healthy"])
3480 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
3481 self.failUnless(r["post-repair-results"]["results"]["healthy"])
3482 d.addCallback(_got_json_sick)
3484 d.addErrback(self.explain_web_error)
    def test_unknown(self, immutable=False):
        # Verify that unknown/future caps are tolerated in directory
        # listings, t=json renderings, and t=info pages, in both mutable
        # and immutable variants.
        # NOTE(review): this dump elided several lines of this method —
        # the if/else guards selecting between the mutable and immutable
        # variants, grid setup, continuation lines, and the trailing
        # 'return d'. Visible statements are kept verbatim; comments mark
        # where code is known to be missing. Confirm against the original
        # file before relying on control flow here.
        self.basedir = "web/Grid/unknown"
        # (body of an elided 'if immutable:' guard:)
        self.basedir = "web/Grid/unknown-immutable"
        c0 = self.g.clients[0]
        # (grid setup / self.uris / self.fileurls initialization elided here)
        # the future cap format may contain slashes, which must be tolerated
        expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
        # (continuation line with the safe= argument elided)
        # (body of an elided 'if immutable:' guard:)
        name = u"future-imm"
        future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
        d = c0.create_immutable_dirnode({name: (future_node, {})})
        # (elided 'else:' branch — the mutable variant:)
        future_node = UnknownNode(unknown_rwcap, unknown_rocap)
        d = c0.create_dirnode()

        def _stash_root_and_create_file(n):
            # (assignment of self.rootnode elided here)
            self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
            self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
            return self.rootnode.set_node(name, future_node)
        d.addCallback(_stash_root_and_create_file)

        # make sure directory listing tolerates unknown nodes
        d.addCallback(lambda ign: self.GET(self.rooturl))
        def _check_directory_html(res, expected_type_suffix):
            pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
                                 '<td>%s</td>' % (expected_type_suffix, str(name)),
            # (continuation line with the re flags / closing paren elided)
            self.failUnless(re.search(pattern, res), res)
            # find the More Info link for name, should be relative
            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
            info_url = mo.group(1)
            self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
        # (elided 'if immutable:'/'else:' guard selects the expected suffix:)
        d.addCallback(_check_directory_html, "-IMM")
        d.addCallback(_check_directory_html, "")

        d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
        def _check_directory_json(res, expect_rw_uri):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "dirnode")
            f = data[1]["children"][name]
            self.failUnlessEqual(f[0], "unknown")
            # (body of an elided 'if expect_rw_uri:' guard:)
            self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
            # (elided 'else:' branch:)
            self.failIfIn("rw_uri", f[1])
            # (body of an elided 'if immutable:' guard:)
            self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
            # (elided 'else:' branch:)
            self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
            self.failUnless("metadata" in f[1])
        d.addCallback(_check_directory_json, expect_rw_uri=not immutable)

        def _check_info(res, expect_rw_uri, expect_ro_uri):
            self.failUnlessIn("Object Type: <span>unknown</span>", res)
            # (body of an elided 'if expect_rw_uri:' guard:)
            self.failUnlessIn(unknown_rwcap, res)
            # (elided guards here select on expect_ro_uri/immutable:)
            self.failUnlessIn(unknown_immcap, res)
            self.failUnlessIn(unknown_rocap, res)
            # (elided 'else:' branch:)
            self.failIfIn(unknown_rocap, res)
            self.failIfIn("Raw data as", res)
            self.failIfIn("Directory writecap", res)
            self.failIfIn("Checker Operations", res)
            self.failIfIn("Mutable File Operations", res)
            self.failIfIn("Directory Operations", res)

        # FIXME: these should have expect_rw_uri=not immutable; I don't know
        # why they fail. Possibly related to ticket #922.
        d.addCallback(lambda ign: self.GET(expected_info_url))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
        d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
        d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)

        def _check_json(res, expect_rw_uri):
            data = simplejson.loads(res)
            self.failUnlessEqual(data[0], "unknown")
            # (body of an elided 'if expect_rw_uri:' guard:)
            self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
            # (elided 'else:' branch:)
            self.failIfIn("rw_uri", data[1])
            # (body of an elided 'if immutable:' guard:)
            self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
            self.failUnlessReallyEqual(data[1]["mutable"], False)
            # (elided branch — presumably 'elif expect_rw_uri:':)
            self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
            self.failUnlessReallyEqual(data[1]["mutable"], True)
            # (elided 'else:' branch:)
            self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
            self.failIf("mutable" in data[1], data[1])

            # TODO: check metadata contents
            self.failUnless("metadata" in data[1])

        d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=not immutable)

        # and make sure that a read-only version of the directory can be
        # rendered too. This version will not have unknown_rwcap, whether
        # or not future_node was immutable.
        d.addCallback(lambda ign: self.GET(self.rourl))
        # (elided 'if immutable:'/'else:' guard selects the expected suffix:)
        d.addCallback(_check_directory_html, "-IMM")
        d.addCallback(_check_directory_html, "-RO")

        d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
        d.addCallback(_check_directory_json, expect_rw_uri=False)

        d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
        d.addCallback(_check_json, expect_rw_uri=False)

        # TODO: check that getting t=info from the Info link in the ro directory
        # works, and does not include the writecap URI.
        # NOTE(review): trailing 'return d' elided in this dump
def test_immutable_unknown(self):
    """Re-run the unknown-cap checks with the child in an immutable directory.

    Delegates to test_unknown(), which (per its immutable branch) must not
    expose any rw_uri for the from-the-future cap.
    """
    return self.test_unknown(immutable=True)
3622 def test_mutant_dirnodes_are_omitted(self):
# Purpose: children of an immutable directory whose caps are of the wrong
# mutability ("mutants") must be silently dropped by the listing code,
# while the one well-formed LIT child ("lonely") survives.
# NOTE(review): this paste has dropped lines (fused numbering jumps, e.g.
# 3624-3625 grid setup and 3649's `nm = ...` binding that the
# create_from_cap calls below rely on) -- recover them from upstream
# test_web.py before running.
3623 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
3626 c = self.g.clients[0]
3631 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
3632 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
3633 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
3635 # This method tests mainly dirnode, but we'd have to duplicate code in order to
3636 # test the dirnode and web layers separately.
3638 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
3639 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
3640 # When the directory is read, the mutants should be silently disposed of, leaving
3641 # their lonely sibling.
3642 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
3643 # because immutable directories don't have a writecap and therefore that field
3644 # isn't (and can't be) decrypted.
3645 # TODO: The field still exists in the netstring. Technically we should check what
3646 # happens if something is put there (_unpack_contents should raise ValueError),
3647 # but that can wait.
3649 lonely_child = nm.create_from_cap(lonely_uri)
3650 mutant_ro_child = nm.create_from_cap(mut_read_uri)
3651 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
# Force the mutants past the "allowed in immutable directory" guard so the
# malformed directory can be created at all.
3653 def _by_hook_or_by_crook():
3655 for n in [mutant_ro_child, mutant_write_in_ro_child]:
3656 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
3658 mutant_write_in_ro_child.get_write_uri = lambda: None
3659 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
3661 kids = {u"lonely": (lonely_child, {}),
3662 u"ro": (mutant_ro_child, {}),
3663 u"write-in-ro": (mutant_write_in_ro_child, {}),
3665 d = c.create_immutable_dirnode(kids)
3668 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
3669 self.failIf(dn.is_mutable())
3670 self.failUnless(dn.is_readonly())
3671 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
3672 self.failIf(hasattr(dn._node, 'get_writekey'))
3674 self.failUnless("RO-IMM" in rep)
3676 self.failUnlessIn("CHK", cap.to_string())
3679 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
3680 return download_to_data(dn._node)
3681 d.addCallback(_created)
3683 def _check_data(data):
3684 # Decode the netstring representation of the directory to check that all children
3685 # are present. This is a bit of an abstraction violation, but there's not really
3686 # any other way to do it given that the real DirectoryNode._unpack_contents would
3687 # strip the mutant children out (which is what we're trying to test, later).
3690 while position < len(data):
3691 entries, position = split_netstring(data, 1, position)
3693 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
3694 name = name_utf8.decode("utf-8")
# rwcapdata must be empty: immutable dirnodes have no writecap to encrypt.
3695 self.failUnless(rwcapdata == "")
3696 self.failUnless(name in kids)
3697 (expected_child, ign) = kids[name]
3698 self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
3701 self.failUnlessReallyEqual(numkids, 3)
3702 return self.rootnode.list()
3703 d.addCallback(_check_data)
3705 # Now when we use the real directory listing code, the mutants should be absent.
3706 def _check_kids(children):
3707 self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
3708 lonely_node, lonely_metadata = children[u"lonely"]
3710 self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
3711 self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
3712 d.addCallback(_check_kids)
3714 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
3715 d.addCallback(lambda n: n.list())
3716 d.addCallback(_check_kids) # again with dirnode recreated from cap
3718 # Make sure the lonely child can be listed in HTML...
3719 d.addCallback(lambda ign: self.GET(self.rooturl))
3720 def _check_html(res):
3721 self.failIfIn("URI:SSK", res)
3722 get_lonely = "".join([r'<td>FILE</td>',
3724 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
3726 r'\s+<td>%d</td>' % len("one"),
3728 self.failUnless(re.search(get_lonely, res), res)
3730 # find the More Info link for name, should be relative
3731 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3732 info_url = mo.group(1)
3733 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
3734 d.addCallback(_check_html)
# ...and in JSON: only "lonely" is listed, and with no rw_uri.
3737 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3738 def _check_json(res):
3739 data = simplejson.loads(res)
3740 self.failUnlessEqual(data[0], "dirnode")
3741 listed_children = data[1]["children"]
3742 self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
3743 ll_type, ll_data = listed_children[u"lonely"]
3744 self.failUnlessEqual(ll_type, "filenode")
3745 self.failIf("rw_uri" in ll_data)
3746 self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
3747 d.addCallback(_check_json)
3750 def test_deep_check(self):
# Purpose: exercise t=stream-deep-check and t=stream-manifest over a small
# tree (root, good, small, sick, plus an UnknownNode), then make a subdir
# unrecoverable and verify the streaming APIs report "ERROR:" lines.
# NOTE(review): the paste dropped lines (e.g. grid setup after 3751, the
# `def _done(res):` headers before 3796/3980-style bodies, and the final
# `return d`) -- restore from upstream test_web.py.
3751 self.basedir = "web/Grid/deep_check"
3753 c0 = self.g.clients[0]
3757 d = c0.create_dirnode()
3758 def _stash_root_and_create_file(n):
3760 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3761 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3762 d.addCallback(_stash_root_and_create_file)
3763 def _stash_uri(fn, which):
3764 self.uris[which] = fn.get_uri()
3766 d.addCallback(_stash_uri, "good")
3767 d.addCallback(lambda ign:
3768 self.rootnode.add_file(u"small",
3769 upload.Data("literal",
3771 d.addCallback(_stash_uri, "small")
3772 d.addCallback(lambda ign:
3773 self.rootnode.add_file(u"sick",
3774 upload.Data(DATA+"1",
3776 d.addCallback(_stash_uri, "sick")
3778 # this tests that deep-check and stream-manifest will ignore
3779 # UnknownNode instances. Hopefully this will also cover deep-stats.
3780 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3781 d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
# Damage "sick" (delete two shares) so a later repairless check still
# finds it recoverable but degraded.
3783 def _clobber_shares(ignored):
3784 self.delete_shares_numbered(self.uris["sick"], [0,1])
3785 d.addCallback(_clobber_shares)
3793 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3796 units = [simplejson.loads(line)
3797 for line in res.splitlines()
3800 print "response is:", res
3801 print "undecodeable line was '%s'" % line
# 5 traversal units (root, good, small, sick, future) + trailing stats.
3803 self.failUnlessReallyEqual(len(units), 5+1)
3804 # should be parent-first
3806 self.failUnlessEqual(u0["path"], [])
3807 self.failUnlessEqual(u0["type"], "directory")
3808 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
3809 u0cr = u0["check-results"]
3810 self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
3812 ugood = [u for u in units
3813 if u["type"] == "file" and u["path"] == [u"good"]][0]
3814 self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
3815 ugoodcr = ugood["check-results"]
3816 self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
3819 self.failUnlessEqual(stats["type"], "stats")
3821 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
3822 self.failUnlessReallyEqual(s["count-literal-files"], 1)
3823 self.failUnlessReallyEqual(s["count-directories"], 1)
3824 self.failUnlessReallyEqual(s["count-unknown"], 1)
3825 d.addCallback(_done)
3827 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3828 def _check_manifest(res):
3829 self.failUnless(res.endswith("\n"))
3830 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3831 self.failUnlessReallyEqual(len(units), 5+1)
3832 self.failUnlessEqual(units[-1]["type"], "stats")
3834 self.failUnlessEqual(first["path"], [])
3835 self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
3836 self.failUnlessEqual(first["type"], "directory")
3837 stats = units[-1]["stats"]
3838 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
3839 self.failUnlessReallyEqual(stats["count-literal-files"], 1)
3840 self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
3841 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
3842 self.failUnlessReallyEqual(stats["count-unknown"], 1)
3843 d.addCallback(_check_manifest)
3845 # now add root/subdir and root/subdir/grandchild, then make subdir
3846 # unrecoverable, then see what happens
3848 d.addCallback(lambda ign:
3849 self.rootnode.create_subdirectory(u"subdir"))
3850 d.addCallback(_stash_uri, "subdir")
3851 d.addCallback(lambda subdir_node:
3852 subdir_node.add_file(u"grandchild",
3853 upload.Data(DATA+"2",
3855 d.addCallback(_stash_uri, "grandchild")
3857 d.addCallback(lambda ign:
3858 self.delete_shares_numbered(self.uris["subdir"],
3866 # root/subdir [unrecoverable]
3867 # root/subdir/grandchild
3869 # how should a streaming-JSON API indicate fatal error?
3870 # answer: emit ERROR: instead of a JSON string
3872 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3873 def _check_broken_manifest(res):
3874 lines = res.splitlines()
3876 for (i,line) in enumerate(lines)
3877 if line.startswith("ERROR:")]
3879 self.fail("no ERROR: in output: %s" % (res,))
3880 first_error = error_lines[0]
3881 error_line = lines[first_error]
3882 error_msg = lines[first_error+1:]
3883 error_msg_s = "\n".join(error_msg) + "\n"
3884 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3886 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
# Everything before the first ERROR: line must still be valid JSON units.
3887 units = [simplejson.loads(line) for line in lines[:first_error]]
3888 self.failUnlessReallyEqual(len(units), 6) # includes subdir
3889 last_unit = units[-1]
3890 self.failUnlessEqual(last_unit["path"], ["subdir"])
3891 d.addCallback(_check_broken_manifest)
3893 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3894 def _check_broken_deepcheck(res):
3895 lines = res.splitlines()
3897 for (i,line) in enumerate(lines)
3898 if line.startswith("ERROR:")]
3900 self.fail("no ERROR: in output: %s" % (res,))
3901 first_error = error_lines[0]
3902 error_line = lines[first_error]
3903 error_msg = lines[first_error+1:]
3904 error_msg_s = "\n".join(error_msg) + "\n"
3905 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3907 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3908 units = [simplejson.loads(line) for line in lines[:first_error]]
3909 self.failUnlessReallyEqual(len(units), 6) # includes subdir
3910 last_unit = units[-1]
3911 self.failUnlessEqual(last_unit["path"], ["subdir"])
3912 r = last_unit["check-results"]["results"]
3913 self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
3914 self.failUnlessReallyEqual(r["count-shares-good"], 1)
3915 self.failUnlessReallyEqual(r["recoverable"], False)
3916 d.addCallback(_check_broken_deepcheck)
3918 d.addErrback(self.explain_web_error)
3921 def test_deep_check_and_repair(self):
# Purpose: run t=stream-deep-check&repair=true over root/good/small/sick
# and verify that only "sick" (one share unlinked) triggers a repair, and
# that the repair succeeds (9 -> 10 good shares).
# NOTE(review): paste has elided lines (grid setup after 3922, the
# `def _done(res):` header before 3980, `return d`) -- restore from
# upstream test_web.py. The "dead"/"corrupt" branches are deliberately
# commented out in the original, not elided.
3922 self.basedir = "web/Grid/deep_check_and_repair"
3924 c0 = self.g.clients[0]
3928 d = c0.create_dirnode()
3929 def _stash_root_and_create_file(n):
3931 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3932 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3933 d.addCallback(_stash_root_and_create_file)
3934 def _stash_uri(fn, which):
3935 self.uris[which] = fn.get_uri()
3936 d.addCallback(_stash_uri, "good")
3937 d.addCallback(lambda ign:
3938 self.rootnode.add_file(u"small",
3939 upload.Data("literal",
3941 d.addCallback(_stash_uri, "small")
3942 d.addCallback(lambda ign:
3943 self.rootnode.add_file(u"sick",
3944 upload.Data(DATA+"1",
3946 d.addCallback(_stash_uri, "sick")
3947 #d.addCallback(lambda ign:
3948 # self.rootnode.add_file(u"dead",
3949 # upload.Data(DATA+"2",
3951 #d.addCallback(_stash_uri, "dead")
3953 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3954 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3955 #d.addCallback(_stash_uri, "corrupt")
3957 def _clobber_shares(ignored):
3958 good_shares = self.find_uri_shares(self.uris["good"])
3959 self.failUnlessReallyEqual(len(good_shares), 10)
3960 sick_shares = self.find_uri_shares(self.uris["sick"])
# Drop exactly one share of "sick" so repair has something to do.
3961 os.unlink(sick_shares[0][2])
3962 #dead_shares = self.find_uri_shares(self.uris["dead"])
3963 #for i in range(1, 10):
3964 # os.unlink(dead_shares[i][2])
3966 #c_shares = self.find_uri_shares(self.uris["corrupt"])
3967 #cso = CorruptShareOptions()
3968 #cso.stdout = StringIO()
3969 #cso.parseOptions([c_shares[0][2]])
3971 d.addCallback(_clobber_shares)
3974 # root/good CHK, 10 shares
3976 # root/sick CHK, 9 shares
3978 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
3980 units = [simplejson.loads(line)
3981 for line in res.splitlines()
# 4 traversal units (root, good, small, sick) + trailing stats.
3983 self.failUnlessReallyEqual(len(units), 4+1)
3984 # should be parent-first
3986 self.failUnlessEqual(u0["path"], [])
3987 self.failUnlessEqual(u0["type"], "directory")
3988 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
3989 u0crr = u0["check-and-repair-results"]
3990 self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
3991 self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3993 ugood = [u for u in units
3994 if u["type"] == "file" and u["path"] == [u"good"]][0]
3995 self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
3996 ugoodcrr = ugood["check-and-repair-results"]
3997 self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
3998 self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
4000 usick = [u for u in units
4001 if u["type"] == "file" and u["path"] == [u"sick"]][0]
4002 self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
4003 usickcrr = usick["check-and-repair-results"]
4004 self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
4005 self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
4006 self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
4007 self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
4010 self.failUnlessEqual(stats["type"], "stats")
4012 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
4013 self.failUnlessReallyEqual(s["count-literal-files"], 1)
4014 self.failUnlessReallyEqual(s["count-directories"], 1)
4015 d.addCallback(_done)
4017 d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    """Return [(share_filename, lease_count), ...] for every share of self.uris[which].

    'ignored' exists only so this method can be chained directly with
    d.addCallback(self._count_leases, which); the previous callback's
    result is discarded. The returned list is what _assert_leasecount()
    expects as its first real argument.
    """
    u = self.uris[which]
    shares = self.find_uri_shares(u)
    # Bug fix: lease_counts was appended to without ever being
    # initialized, and the method fell off the end returning None even
    # though callers chain the result into _assert_leasecount().
    lease_counts = []
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        num_leases = len(list(sf.get_leases()))
        lease_counts.append((fn, num_leases))
    return lease_counts
def _assert_leasecount(self, lease_counts, expected):
    """Fail the test unless every (filename, count) pair carries exactly
    'expected' leases. 'lease_counts' is the list built by _count_leases()."""
    for fn, num_leases in lease_counts:
        if num_leases == expected:
            continue
        self.fail("expected %d leases, have %d, on %s"
                  % (expected, num_leases, fn))
4036 def test_add_lease(self):
# Purpose: verify that t=check&add-lease=true adds a second lease only
# when issued by a *different* client (different lease secrets); the
# original client merely renews its existing lease.
# NOTE(review): paste dropped lines (e.g. DATA setup after 4039,
# `self.fileurls = {}` before 4057, and the clientnum=1 continuation of
# line 4114) -- restore from upstream test_web.py.
4037 self.basedir = "web/Grid/add_lease"
4038 self.set_up_grid(num_clients=2)
4039 c0 = self.g.clients[0]
4042 d = c0.upload(upload.Data(DATA, convergence=""))
4043 def _stash_uri(ur, which):
4044 self.uris[which] = ur.uri
4045 d.addCallback(_stash_uri, "one")
4046 d.addCallback(lambda ign:
4047 c0.upload(upload.Data(DATA+"1", convergence="")))
4048 d.addCallback(_stash_uri, "two")
4049 def _stash_mutable_uri(n, which):
4050 self.uris[which] = n.get_uri()
4051 assert isinstance(self.uris[which], str)
4052 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
4053 d.addCallback(_stash_mutable_uri, "mutable")
4055 def _compute_fileurls(ignored):
4057 for which in self.uris:
4058 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4059 d.addCallback(_compute_fileurls)
# Baseline: one lease each from the original upload.
4061 d.addCallback(self._count_leases, "one")
4062 d.addCallback(self._assert_leasecount, 1)
4063 d.addCallback(self._count_leases, "two")
4064 d.addCallback(self._assert_leasecount, 1)
4065 d.addCallback(self._count_leases, "mutable")
4066 d.addCallback(self._assert_leasecount, 1)
4068 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
4069 def _got_html_good(res):
4070 self.failUnless("Healthy" in res, res)
4071 self.failIf("Not Healthy" in res, res)
4072 d.addCallback(_got_html_good)
# A plain check must not change any lease counts.
4074 d.addCallback(self._count_leases, "one")
4075 d.addCallback(self._assert_leasecount, 1)
4076 d.addCallback(self._count_leases, "two")
4077 d.addCallback(self._assert_leasecount, 1)
4078 d.addCallback(self._count_leases, "mutable")
4079 d.addCallback(self._assert_leasecount, 1)
4081 # this CHECK uses the original client, which uses the same
4082 # lease-secrets, so it will just renew the original lease
4083 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
4084 d.addCallback(_got_html_good)
4086 d.addCallback(self._count_leases, "one")
4087 d.addCallback(self._assert_leasecount, 1)
4088 d.addCallback(self._count_leases, "two")
4089 d.addCallback(self._assert_leasecount, 1)
4090 d.addCallback(self._count_leases, "mutable")
4091 d.addCallback(self._assert_leasecount, 1)
4093 # this CHECK uses an alternate client, which adds a second lease
4094 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
4095 d.addCallback(_got_html_good)
4097 d.addCallback(self._count_leases, "one")
4098 d.addCallback(self._assert_leasecount, 2)
4099 d.addCallback(self._count_leases, "two")
4100 d.addCallback(self._assert_leasecount, 1)
4101 d.addCallback(self._count_leases, "mutable")
4102 d.addCallback(self._assert_leasecount, 1)
4104 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
4105 d.addCallback(_got_html_good)
4107 d.addCallback(self._count_leases, "one")
4108 d.addCallback(self._assert_leasecount, 2)
4109 d.addCallback(self._count_leases, "two")
4110 d.addCallback(self._assert_leasecount, 1)
4111 d.addCallback(self._count_leases, "mutable")
4112 d.addCallback(self._assert_leasecount, 1)
4114 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
4116 d.addCallback(_got_html_good)
4118 d.addCallback(self._count_leases, "one")
4119 d.addCallback(self._assert_leasecount, 2)
4120 d.addCallback(self._count_leases, "two")
4121 d.addCallback(self._assert_leasecount, 1)
4122 d.addCallback(self._count_leases, "mutable")
4123 d.addCallback(self._assert_leasecount, 2)
4125 d.addErrback(self.explain_web_error)
4128 def test_deep_add_lease(self):
# Purpose: same add-lease semantics as test_add_lease, but driven through
# t=stream-deep-check&add-lease=true over a whole tree: same-client
# checks renew (count stays 1), a different client (clientnum=1, elided
# continuation of line 4181) adds a second lease everywhere.
# NOTE(review): paste dropped lines (DATA/uris/fileurls setup after 4131,
# `def _done(res):` before 4157, `return d`) -- restore from upstream.
4129 self.basedir = "web/Grid/deep_add_lease"
4130 self.set_up_grid(num_clients=2)
4131 c0 = self.g.clients[0]
4135 d = c0.create_dirnode()
4136 def _stash_root_and_create_file(n):
4138 self.uris["root"] = n.get_uri()
4139 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4140 return n.add_file(u"one", upload.Data(DATA, convergence=""))
4141 d.addCallback(_stash_root_and_create_file)
4142 def _stash_uri(fn, which):
4143 self.uris[which] = fn.get_uri()
4144 d.addCallback(_stash_uri, "one")
4145 d.addCallback(lambda ign:
4146 self.rootnode.add_file(u"small",
4147 upload.Data("literal",
4149 d.addCallback(_stash_uri, "small")
4151 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
4152 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
4153 d.addCallback(_stash_uri, "mutable")
4155 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
4157 units = [simplejson.loads(line)
4158 for line in res.splitlines()
4160 # root, one, small, mutable, stats
4161 self.failUnlessReallyEqual(len(units), 4+1)
4162 d.addCallback(_done)
4164 d.addCallback(self._count_leases, "root")
4165 d.addCallback(self._assert_leasecount, 1)
4166 d.addCallback(self._count_leases, "one")
4167 d.addCallback(self._assert_leasecount, 1)
4168 d.addCallback(self._count_leases, "mutable")
4169 d.addCallback(self._assert_leasecount, 1)
4171 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
4172 d.addCallback(_done)
# Same client: add-lease only renews, counts stay at 1.
4174 d.addCallback(self._count_leases, "root")
4175 d.addCallback(self._assert_leasecount, 1)
4176 d.addCallback(self._count_leases, "one")
4177 d.addCallback(self._assert_leasecount, 1)
4178 d.addCallback(self._count_leases, "mutable")
4179 d.addCallback(self._assert_leasecount, 1)
4181 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
4183 d.addCallback(_done)
4185 d.addCallback(self._count_leases, "root")
4186 d.addCallback(self._assert_leasecount, 2)
4187 d.addCallback(self._count_leases, "one")
4188 d.addCallback(self._assert_leasecount, 2)
4189 d.addCallback(self._count_leases, "mutable")
4190 d.addCallback(self._assert_leasecount, 2)
4192 d.addErrback(self.explain_web_error)
4196 def test_exceptions(self):
# Purpose: verify web-layer error reporting: NoSharesError /
# NotEnoughSharesError / UnrecoverableFileError surface as sensible
# text/plain (or HTML, for directory pages) with the right HTTP status,
# 404 for missing children, and a deliberately-exploding resource
# (ErrorBoom) yields 500 with HTML or plain tracebacks per Accept header.
# NOTE(review): paste dropped lines (e.g. the `def _stash_root(n):` and
# `def _stash_bad(ur):` headers before 4205/4211, `u = n.get_uri()`
# bindings in the dirnode manglers, `return d`) -- restore from upstream.
4197 self.basedir = "web/Grid/exceptions"
4198 self.set_up_grid(num_clients=1, num_servers=2)
4199 c0 = self.g.clients[0]
4200 c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
4203 d = c0.create_dirnode()
4205 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4206 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
4208 d.addCallback(_stash_root)
4209 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
4211 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
4212 self.delete_shares_numbered(ur.uri, range(1,10))
# Flip a key bit to build a cap for which zero shares can ever be found.
4214 u = uri.from_string(ur.uri)
4215 u.key = testutil.flip_bit(u.key, 0)
4216 baduri = u.to_string()
4217 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
4218 d.addCallback(_stash_bad)
4219 d.addCallback(lambda ign: c0.create_dirnode())
4220 def _mangle_dirnode_1share(n):
4222 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
4223 self.fileurls["dir-1share-json"] = url + "?t=json"
4224 self.delete_shares_numbered(u, range(1,10))
4225 d.addCallback(_mangle_dirnode_1share)
4226 d.addCallback(lambda ign: c0.create_dirnode())
4227 def _mangle_dirnode_0share(n):
4229 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
4230 self.fileurls["dir-0share-json"] = url + "?t=json"
4231 self.delete_shares_numbered(u, range(0,10))
4232 d.addCallback(_mangle_dirnode_0share)
4234 # NotEnoughSharesError should be reported sensibly, with a
4235 # text/plain explanation of the problem, and perhaps some
4236 # information on which shares *could* be found.
4238 d.addCallback(lambda ignored:
4239 self.shouldHTTPError("GET unrecoverable",
4240 410, "Gone", "NoSharesError",
4241 self.GET, self.fileurls["0shares"]))
4242 def _check_zero_shares(body):
4243 self.failIf("<html>" in body, body)
4244 body = " ".join(body.strip().split())
4245 exp = ("NoSharesError: no shares could be found. "
4246 "Zero shares usually indicates a corrupt URI, or that "
4247 "no servers were connected, but it might also indicate "
4248 "severe corruption. You should perform a filecheck on "
4249 "this object to learn more. The full error message is: "
4250 "no shares (need 3). Last failure: None")
4251 self.failUnlessReallyEqual(exp, body)
4252 d.addCallback(_check_zero_shares)
4255 d.addCallback(lambda ignored:
4256 self.shouldHTTPError("GET 1share",
4257 410, "Gone", "NotEnoughSharesError",
4258 self.GET, self.fileurls["1share"]))
4259 def _check_one_share(body):
4260 self.failIf("<html>" in body, body)
4261 body = " ".join(body.strip().split())
4262 msgbase = ("NotEnoughSharesError: This indicates that some "
4263 "servers were unavailable, or that shares have been "
4264 "lost to server departure, hard drive failure, or disk "
4265 "corruption. You should perform a filecheck on "
4266 "this object to learn more. The full error message is:"
# Two acceptable renderings: the surviving share may or may not still
# be listed as pending when the download gives up.
4268 msg1 = msgbase + (" ran out of shares:"
4271 " overdue= unused= need 3. Last failure: None")
4272 msg2 = msgbase + (" ran out of shares:"
4274 " pending=Share(sh0-on-xgru5)"
4275 " overdue= unused= need 3. Last failure: None")
4276 self.failUnless(body == msg1 or body == msg2, body)
4277 d.addCallback(_check_one_share)
4279 d.addCallback(lambda ignored:
4280 self.shouldHTTPError("GET imaginary",
4281 404, "Not Found", None,
4282 self.GET, self.fileurls["imaginary"]))
4283 def _missing_child(body):
4284 self.failUnless("No such child: imaginary" in body, body)
4285 d.addCallback(_missing_child)
4287 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
4288 def _check_0shares_dir_html(body):
4289 self.failUnless("<html>" in body, body)
4290 # we should see the regular page, but without the child table or
4292 body = " ".join(body.strip().split())
4293 self.failUnlessIn('href="?t=info">More info on this directory',
4295 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4296 "could not be retrieved, because there were insufficient "
4297 "good shares. This might indicate that no servers were "
4298 "connected, insufficient servers were connected, the URI "
4299 "was corrupt, or that shares have been lost due to server "
4300 "departure, hard drive failure, or disk corruption. You "
4301 "should perform a filecheck on this object to learn more.")
4302 self.failUnlessIn(exp, body)
4303 self.failUnlessIn("No upload forms: directory is unreadable", body)
4304 d.addCallback(_check_0shares_dir_html)
4306 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
4307 def _check_1shares_dir_html(body):
4308 # at some point, we'll split UnrecoverableFileError into 0-shares
4309 # and some-shares like we did for immutable files (since there
4310 # are different sorts of advice to offer in each case). For now,
4311 # they present the same way.
4312 self.failUnless("<html>" in body, body)
4313 body = " ".join(body.strip().split())
4314 self.failUnlessIn('href="?t=info">More info on this directory',
4316 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4317 "could not be retrieved, because there were insufficient "
4318 "good shares. This might indicate that no servers were "
4319 "connected, insufficient servers were connected, the URI "
4320 "was corrupt, or that shares have been lost due to server "
4321 "departure, hard drive failure, or disk corruption. You "
4322 "should perform a filecheck on this object to learn more.")
4323 self.failUnlessIn(exp, body)
4324 self.failUnlessIn("No upload forms: directory is unreadable", body)
4325 d.addCallback(_check_1shares_dir_html)
4327 d.addCallback(lambda ignored:
4328 self.shouldHTTPError("GET dir-0share-json",
4329 410, "Gone", "UnrecoverableFileError",
4331 self.fileurls["dir-0share-json"]))
4332 def _check_unrecoverable_file(body):
4333 self.failIf("<html>" in body, body)
4334 body = " ".join(body.strip().split())
4335 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4336 "could not be retrieved, because there were insufficient "
4337 "good shares. This might indicate that no servers were "
4338 "connected, insufficient servers were connected, the URI "
4339 "was corrupt, or that shares have been lost due to server "
4340 "departure, hard drive failure, or disk corruption. You "
4341 "should perform a filecheck on this object to learn more.")
4342 self.failUnlessReallyEqual(exp, body)
4343 d.addCallback(_check_unrecoverable_file)
4345 d.addCallback(lambda ignored:
4346 self.shouldHTTPError("GET dir-1share-json",
4347 410, "Gone", "UnrecoverableFileError",
4349 self.fileurls["dir-1share-json"]))
4350 d.addCallback(_check_unrecoverable_file)
4352 d.addCallback(lambda ignored:
4353 self.shouldHTTPError("GET imaginary",
4354 404, "Not Found", None,
4355 self.GET, self.fileurls["imaginary"]))
4357 # attach a webapi child that throws a random error, to test how it
4359 w = c0.getServiceNamed("webish")
4360 w.root.putChild("ERRORBOOM", ErrorBoom())
4362 # "Accept: */*" : should get a text/html stack trace
4363 # "Accept: text/plain" : should get a text/plain stack trace
4364 # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
4365 # no Accept header: should get a text/html stack trace
4367 d.addCallback(lambda ignored:
4368 self.shouldHTTPError("GET errorboom_html",
4369 500, "Internal Server Error", None,
4370 self.GET, "ERRORBOOM",
4371 headers={"accept": ["*/*"]}))
4372 def _internal_error_html1(body):
4373 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4374 d.addCallback(_internal_error_html1)
4376 d.addCallback(lambda ignored:
4377 self.shouldHTTPError("GET errorboom_text",
4378 500, "Internal Server Error", None,
4379 self.GET, "ERRORBOOM",
4380 headers={"accept": ["text/plain"]}))
4381 def _internal_error_text2(body):
4382 self.failIf("<html>" in body, body)
4383 self.failUnless(body.startswith("Traceback "), body)
4384 d.addCallback(_internal_error_text2)
4386 CLI_accepts = "text/plain, application/octet-stream"
4387 d.addCallback(lambda ignored:
4388 self.shouldHTTPError("GET errorboom_text",
4389 500, "Internal Server Error", None,
4390 self.GET, "ERRORBOOM",
4391 headers={"accept": [CLI_accepts]}))
4392 def _internal_error_text3(body):
4393 self.failIf("<html>" in body, body)
4394 self.failUnless(body.startswith("Traceback "), body)
4395 d.addCallback(_internal_error_text3)
4397 d.addCallback(lambda ignored:
4398 self.shouldHTTPError("GET errorboom_text",
4399 500, "Internal Server Error", None,
4400 self.GET, "ERRORBOOM"))
4401 def _internal_error_html4(body):
4402 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4403 d.addCallback(_internal_error_html4)
4405 def _flush_errors(res):
4406 # Trial: please ignore the CompletelyUnhandledError in the logs
4407 self.flushLoggedErrors(CompletelyUnhandledError)
4409 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Marker exception that no webapi handler knows about.

    Raised by ErrorBoom so test_exceptions can verify the generic
    internal-server-error path; trial is told to ignore it via
    flushLoggedErrors(). (The pasted source lost the class body, which
    made this line a syntax error; the docstring restores a valid body.)
    """
class ErrorBoom(rend.Page):
    """A web resource that always explodes before rendering.

    test_exceptions mounts this at /ERRORBOOM to confirm that the webapi
    turns an arbitrary unhandled exception into a 500 response with an
    HTML or text/plain traceback, depending on the Accept header.
    """
    def beforeRender(self, ctx):
        # Raised before any output is produced, so only the generic
        # exception handler can build the response.
        raise CompletelyUnhandledError("whoops")