2 import os.path, re, urllib, time
4 from StringIO import StringIO
5 from twisted.application import service
6 from twisted.trial import unittest
7 from twisted.internet import defer, reactor
8 from twisted.internet.task import Clock
9 from twisted.web import client, error, http
10 from twisted.python import failure, log
11 from nevow import rend
12 from allmydata import interfaces, uri, webish, dirnode
13 from allmydata.storage.shares import get_share_file
14 from allmydata.storage_client import StorageFarmBroker
15 from allmydata.immutable import upload
16 from allmydata.immutable.downloader.status import DownloadStatus
17 from allmydata.dirnode import DirectoryNode
18 from allmydata.nodemaker import NodeMaker
19 from allmydata.unknown import UnknownNode
20 from allmydata.web import status, common
21 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
22 from allmydata.util import fileutil, base32, hashutil
23 from allmydata.util.consumer import download_to_data
24 from allmydata.util.netstring import split_netstring
25 from allmydata.util.encodingutil import to_str
26 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
27 create_chk_filenode, WebErrorMixin, ShouldFailMixin, make_mutable_file_uri
28 from allmydata.interfaces import IMutableFileNode
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
# NOTE(review): this chunk is a line-numbered dump of a Tahoe-LAFS web test
# module; the leading integers on each line are extraction residue, and some
# original lines are absent between the numbers shown.
36 # create a fake uploader/downloader, and a couple of fake dirnodes, then
37 # create a webserver that works against them
# Generous trial timeout for slow hardware (see inline comment).
39 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
# UTF-8-encoded caps "from the future": exercise handling of unknown URI
# types in the web UI (\u263A is WHITE SMILING FACE).
41 unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
42 unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
43 unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
# Stub stats provider with empty stats/counters dicts.
# NOTE(review): original lines 46/48 (presumably a get_stats() wrapper and
# its return) are missing from this dump — confirm against the full source.
45 class FakeStatsProvider:
47 stats = {'stats': {}, 'counters': {}}
class FakeNodeMaker(NodeMaker):
    """NodeMaker double whose caps resolve to in-memory fake filenodes.

    Every cap type is backed by the fake node classes from
    allmydata.test.common, so no storage servers are contacted.
    """
    def _create_lit(self, cap):
        # LIT caps are small enough to be served by the fake CHK node too
        return FakeCHKFileNode(cap)

    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap)

    def _create_mutable(self, cap):
        node = FakeMutableFileNode(None, None, None, None)
        return node.init_from_cap(cap)

    def create_mutable_file(self, contents="", keysize=None):
        # 'keysize' is accepted only for signature compatibility with the
        # real NodeMaker; the fake ignores it.
        node = FakeMutableFileNode(None, None, None, None)
        return node.create(contents)
# Uploader stub: stores uploaded bytes in a fake CHK filenode instead of
# pushing shares to real servers.
# NOTE(review): several original lines are missing from this dump (orig 62;
# 66-67, the inner 'def _got_data(...)' header; 71, 'return results'; 73,
# 'return d'; and the get_helper_info() body at 75-76) — confirm against
# the full source before relying on this text.
61 class FakeUploader(service.Service):
63 def upload(self, uploadable, history=None):
64 d = uploadable.get_size()
65 d.addCallback(lambda size: uploadable.read(size))
68 n = create_chk_filenode(data)
69 results = upload.UploadResults()
70 results.uri = n.get_uri()
72 d.addCallback(_got_data)
74 def get_helper_info(self):
# Interior of build_one_ds(): populate a DownloadStatus with segment, DYHB,
# read and block events so the status web pages have data to render.
# NOTE(review): the enclosing 'def build_one_ds():' header (orig 77), the
# binding of 'now' (orig 79-80 region) and the trailing return of the
# status object(s) are missing from this dump — confirm against the full
# source.
78 ds = DownloadStatus("storage_index", 1234)
81 serverid_a = hashutil.tagged_hash("foo", "serverid_a")[:20]
82 serverid_b = hashutil.tagged_hash("foo", "serverid_b")[:20]
83 storage_index = hashutil.storage_index_hash("SI")
84 e0 = ds.add_segment_request(0, now)
86 e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime
87 e1 = ds.add_segment_request(1, now+2)
89 # two outstanding requests
90 e2 = ds.add_segment_request(2, now+4)
91 e3 = ds.add_segment_request(3, now+5)
92 del e2,e3 # hush pyflakes
94 # simulate a segment which gets delivered faster than a system clock tick (ticket #1166)
95 e = ds.add_segment_request(4, now)
97 e.deliver(now, 0, 140, 0.5)
99 e = ds.add_dyhb_request(serverid_a, now)
100 e.finished([1,2], now+1)
101 e = ds.add_dyhb_request(serverid_b, now+2) # left unfinished
103 e = ds.add_read_event(0, 120, now)
104 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
106 e = ds.add_read_event(120, 30, now+2) # left unfinished
108 e = ds.add_block_request(serverid_a, 1, 100, 20, now)
109 e.finished(20, now+1)
110 e = ds.add_block_request(serverid_a, 1, 120, 30, now+1) # left unfinished
112 # make sure that add_read_event() can come first too
113 ds1 = DownloadStatus(storage_index, 1234)
114 e = ds1.add_read_event(0, 120, now)
115 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
# Interior of a History stub: one canned status object of each kind, plus
# the list_all_*_statuses() accessors the status pages call.
# NOTE(review): the 'class FakeHistory:' header (orig ~119) and the body of
# list_all_helper_statuses (orig 138, presumably 'return []') are missing
# from this dump.
121 _all_upload_status = [upload.UploadStatus()]
122 _all_download_status = [build_one_ds()]
123 _all_mapupdate_statuses = [servermap.UpdateStatus()]
124 _all_publish_statuses = [publish.PublishStatus()]
125 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
127 def list_all_upload_statuses(self):
128 return self._all_upload_status
129 def list_all_download_statuses(self):
130 return self._all_download_status
131 def list_all_mapupdate_statuses(self):
132 return self._all_mapupdate_statuses
133 def list_all_publish_statuses(self):
134 return self._all_publish_statuses
135 def list_all_retrieve_statuses(self):
136 return self._all_retrieve_statuses
137 def list_all_helper_statuses(self):
# Client double: wires the fake uploader/history/nodemaker into a real
# MultiService so a webish server can be mounted on it without touching
# the network.
# NOTE(review): the '__init__' header (orig 141) and parts of its body
# (orig 143, 150, and 158-160 — including the remaining arguments of the
# FakeNodeMaker constructor call) are missing from this dump.
140 class FakeClient(Client):
142 # don't upcall to Client.__init__, since we only want to initialize a
144 service.MultiService.__init__(self)
145 self.nodeid = "fake_nodeid"
146 self.nickname = "fake_nickname"
147 self.introducer_furl = "None"
148 self.stats_provider = FakeStatsProvider()
149 self._secret_holder = SecretHolder("lease secret", "convergence secret")
151 self.convergence = "some random string"
152 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
153 self.introducer_client = None
154 self.history = FakeHistory()
155 self.uploader = FakeUploader()
156 self.uploader.setServiceParent(self)
157 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
161 def startService(self):
162 return service.MultiService.startService(self)
163 def stopService(self):
164 return service.MultiService.stopService(self)
# Module-level alias for the SDMF size limit; used by the "too big" tests.
166 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
# Shared fixture: starts a FakeClient plus a webish server and builds a
# small directory tree (public/foo/bar.txt, foo/sub/baz.txt, foo/empty,
# foo/blockingfile, a unicode-named child, and a read-only 'reedownlee'
# directory) for the HTTP tests below.
# NOTE(review): the 'def setUp(self):' header (orig 169) and a number of
# interior lines (orig 173, 175, 181, 184, 189-191, 199, 203, 210, 213,
# 218, 222, 226-227, most of 231-240, 242, 244, 248-249) are missing from
# this dump — confirm against the full source.
168 class WebMixin(object):
170 self.s = FakeClient()
171 self.s.startService()
172 self.staticdir = self.mktemp()
174 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
176 self.ws.setServiceParent(self.s)
177 self.webish_port = self.ws.getPortnum()
178 self.webish_url = self.ws.getURL()
179 assert self.webish_url.endswith("/")
180 self.webish_url = self.webish_url[:-1] # these tests add their own /
182 l = [ self.s.create_dirnode() for x in range(6) ]
183 d = defer.DeferredList(l)
185 self.public_root = res[0][1]
186 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
187 self.public_url = "/uri/" + self.public_root.get_uri()
188 self.private_root = res[1][1]
192 self._foo_uri = foo.get_uri()
193 self._foo_readonly_uri = foo.get_readonly_uri()
194 self._foo_verifycap = foo.get_verify_cap().to_string()
195 # NOTE: we ignore the deferred on all set_uri() calls, because we
196 # know the fake nodes do these synchronously
197 self.public_root.set_uri(u"foo", foo.get_uri(),
198 foo.get_readonly_uri())
200 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
201 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
202 self._bar_txt_verifycap = n.get_verify_cap().to_string()
204 foo.set_uri(u"empty", res[3][1].get_uri(),
205 res[3][1].get_readonly_uri())
206 sub_uri = res[4][1].get_uri()
207 self._sub_uri = sub_uri
208 foo.set_uri(u"sub", sub_uri, sub_uri)
209 sub = self.s.create_node_from_uri(sub_uri)
211 _ign, n, blocking_uri = self.makefile(1)
212 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
214 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
215 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
216 # still think of it as an umlaut
217 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
219 _ign, n, baz_file = self.makefile(2)
220 self._baz_file_uri = baz_file
221 sub.set_uri(u"baz.txt", baz_file, baz_file)
223 _ign, n, self._bad_file_uri = self.makefile(3)
224 # this uri should not be downloadable
225 del FakeCHKFileNode.all_contents[self._bad_file_uri]
228 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
229 rodir.get_readonly_uri())
230 rodir.set_uri(u"nor", baz_file, baz_file)
235 # public/foo/blockingfile
238 # public/foo/sub/baz.txt
240 # public/reedownlee/nor
241 self.NEWFILE_CONTENTS = "newfile contents\n"
243 return foo.get_metadata_for(u"bar.txt")
245 def _got_metadata(metadata):
246 self._bar_txt_metadata = metadata
247 d.addCallback(_got_metadata)
def makefile(self, number):
    """Create a fake CHK file whose contents mention *number*.

    Returns a (contents, filenode, read-cap URI) tuple.
    """
    body = "contents of file %s\n" % number
    node = create_chk_filenode(body)
    return (body, node, node.get_uri())
# NOTE(review): the 'def tearDown(self):' header (orig 255) is missing
# from this dump; this return shuts down the fixture's FakeClient service.
256 return self.s.stopService()
# Assert helper: response body must equal the canned bar.txt contents.
258 def failUnlessIsBarDotTxt(self, res):
259 self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
# Assert helper: response must be the t=json rendering of the immutable
# bar.txt filenode — a ["filenode", {...}] pair with no rw_uri and with
# ro/verify caps and size matching the fixture.
261 def failUnlessIsBarJSON(self, res):
262 data = simplejson.loads(res)
263 self.failUnless(isinstance(data, list))
264 self.failUnlessEqual(data[0], "filenode")
265 self.failUnless(isinstance(data[1], dict))
266 self.failIf(data[1]["mutable"])
267 self.failIf("rw_uri" in data[1]) # immutable
268 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
269 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
270 self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))
# Assert helper: response must be the t=json rendering of the mutable foo/
# dirnode — caps, sorted child names, and tahoe link metadata
# (linkcrtime/linkmotime) for the children.
# NOTE(review): orig lines 288 (the for-clause of the kids dict build) and
# 306-307 (tail of the final assertion) are missing from this dump.
272 def failUnlessIsFooJSON(self, res):
273 data = simplejson.loads(res)
274 self.failUnless(isinstance(data, list))
275 self.failUnlessEqual(data[0], "dirnode", res)
276 self.failUnless(isinstance(data[1], dict))
277 self.failUnless(data[1]["mutable"])
278 self.failUnless("rw_uri" in data[1]) # mutable
279 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
280 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
281 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
283 kidnames = sorted([unicode(n) for n in data[1]["children"]])
284 self.failUnlessEqual(kidnames,
285 [u"bar.txt", u"blockingfile", u"empty",
286 u"n\u00fc.txt", u"sub"])
287 kids = dict( [(unicode(name),value)
289 in data[1]["children"].iteritems()] )
290 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
291 self.failUnlessIn("metadata", kids[u"sub"][1])
292 self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
293 tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
294 self.failUnlessIn("linkcrtime", tahoe_md)
295 self.failUnlessIn("linkmotime", tahoe_md)
296 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
297 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
298 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
299 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
300 self._bar_txt_verifycap)
301 self.failUnlessIn("metadata", kids[u"bar.txt"][1])
302 self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
303 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
304 self._bar_txt_metadata["tahoe"]["linkcrtime"])
305 self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
# HTTP helpers: issue GET/HEAD against the test webish server via Twisted
# client factories. With return_response=True the deferred fires with
# (body, status, headers) instead of just the body.
# NOTE(review): orig lines 309 (tail of the GET signature), 317-318, 320,
# 329-330 and 332 (the inner '_got_data' wrappers and the conditional
# return_response plumbing) are missing from this dump.
308 def GET(self, urlpath, followRedirect=False, return_response=False,
310 # if return_response=True, this fires with (data, statuscode,
311 # respheaders) instead of just data.
312 assert not isinstance(urlpath, unicode)
313 url = self.webish_url + urlpath
314 factory = HTTPClientGETFactory(url, method="GET",
315 followRedirect=followRedirect, **kwargs)
316 reactor.connectTCP("localhost", self.webish_port, factory)
319 return (data, factory.status, factory.response_headers)
321 d.addCallback(_got_data)
322 return factory.deferred
324 def HEAD(self, urlpath, return_response=False, **kwargs):
325 # this requires some surgery, because twisted.web.client doesn't want
326 # to give us back the response headers.
327 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
328 reactor.connectTCP("localhost", self.webish_port, factory)
331 return (data, factory.status, factory.response_headers)
333 d.addCallback(_got_data)
334 return factory.deferred
def PUT(self, urlpath, data, **kwargs):
    """Issue an HTTP PUT of *data* to *urlpath* on the test webish server."""
    return client.getPage(self.webish_url + urlpath,
                          method="PUT", postdata=data, **kwargs)
def DELETE(self, urlpath):
    """Issue an HTTP DELETE to *urlpath* on the test webish server."""
    target = self.webish_url + urlpath
    return client.getPage(target, method="DELETE")
# Build a multipart/form-data body from **fields and POST it. Tuple values
# are treated as (filename, content) file uploads; unicode values are
# UTF-8 encoded before being written into the form.
# NOTE(review): many interior lines of the form-building loop (orig
# 346-348, 350-352, 358, 360, 363-364, 366-371) are missing from this
# dump — confirm against the full source.
344 def POST(self, urlpath, followRedirect=False, **fields):
345 sepbase = "boogabooga"
349 form.append('Content-Disposition: form-data; name="_charset"')
353 for name, value in fields.iteritems():
354 if isinstance(value, tuple):
355 filename, value = value
356 form.append('Content-Disposition: form-data; name="%s"; '
357 'filename="%s"' % (name, filename.encode("utf-8")))
359 form.append('Content-Disposition: form-data; name="%s"' % name)
361 if isinstance(value, unicode):
362 value = value.encode("utf-8")
365 assert isinstance(value, str)
372 body = "\r\n".join(form) + "\r\n"
373 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
374 return self.POST2(urlpath, body, headers, followRedirect)
def POST2(self, urlpath, body="", headers=None, followRedirect=False):
    """POST *body* to *urlpath* on the test webish server.

    Returns the deferred from twisted.web.client.getPage.
    """
    # Use a fresh dict per call: the previous 'headers={}' mutable default
    # was shared between invocations, so one caller's header mutations
    # could leak into later requests.
    if headers is None:
        headers = {}
    url = self.webish_url + urlpath
    return client.getPage(url, method="POST", postdata=body,
                          headers=headers, followRedirect=followRedirect)
# Errback helper: assert that *res* is a Failure wrapping
# *expected_failure*, optionally containing *substring* in its str() and
# *response_substring* in the HTTP response body; otherwise fail the test.
# NOTE(review): orig lines 385 (presumably 'if substring:') and 393 (the
# else-branch header before the final self.fail) are missing from this
# dump.
381 def shouldFail(self, res, expected_failure, which,
382 substring=None, response_substring=None):
383 if isinstance(res, failure.Failure):
384 res.trap(expected_failure)
386 self.failUnless(substring in str(res),
387 "substring '%s' not in '%s'"
388 % (substring, str(res)))
389 if response_substring:
390 self.failUnless(response_substring in res.value.response,
391 "response substring '%s' not in '%s'"
392 % (response_substring, res.value.response))
394 self.fail("%s was supposed to raise %s, not get '%s'" %
395 (which, expected_failure, res))
# Like shouldFail, but invokes *callable* itself via maybeDeferred and
# checks the resulting failure; returns the deferred so callers can chain.
# NOTE(review): orig lines 398 (the response_substring keyword in the
# signature), 403 (the callback header), 413, 415 (branch headers) and
# 418-419 (the addBoth/return tail) are missing from this dump.
397 def shouldFail2(self, expected_failure, which, substring,
399 callable, *args, **kwargs):
400 assert substring is None or isinstance(substring, str)
401 assert response_substring is None or isinstance(response_substring, str)
402 d = defer.maybeDeferred(callable, *args, **kwargs)
404 if isinstance(res, failure.Failure):
405 res.trap(expected_failure)
407 self.failUnless(substring in str(res),
408 "%s: substring '%s' not in '%s'"
409 % (which, substring, str(res)))
410 if response_substring:
411 self.failUnless(response_substring in res.value.response,
412 "%s: response substring '%s' not in '%s'"
414 response_substring, res.value.response))
416 self.fail("%s was supposed to raise %s, not get '%s'" %
417 (which, expected_failure, res))
# Errback helpers: assert that *res* is an error.Error with HTTP status
# 404 (resp. 302); otherwise fail the test.
# NOTE(review): orig lines 425, 427-428 and 433, 435-436 (the else-branch
# headers and the tails of the self.fail calls) are missing from this dump.
421 def should404(self, res, which):
422 if isinstance(res, failure.Failure):
423 res.trap(error.Error)
424 self.failUnlessReallyEqual(res.value.status, "404")
426 self.fail("%s was supposed to Error(404), not get '%s'" %
429 def should302(self, res, which):
430 if isinstance(res, failure.Failure):
431 res.trap(error.Error)
432 self.failUnlessReallyEqual(res.value.status, "302")
434 self.fail("%s was supposed to Error(302), not get '%s'" %
# The web-UI test cases proper: combines the WebMixin fixture with the
# error/stall/equality helper mixins.
# NOTE(review): test_create's body (orig 440-441) and parts of
# test_welcome (orig 443-444, 446, 450, 452-453 — the GET call, the
# _check header and the return) are missing from this dump.
438 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
439 def test_create(self):
442 def test_welcome(self):
445 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
447 self.s.basedir = 'web/test_welcome'
448 fileutil.make_dirs("web/test_welcome")
449 fileutil.make_dirs("web/test_welcome/private")
451 d.addCallback(_check)
# Exercise the /provisioning page: GET it, then POST three sets of form
# fields and check the rendered "Share space consumed" figures.
# NOTE(review): several interior lines (orig 456, 464, 467-470, 472, 474,
# 477, 487, 489-492, 495, 500, 503 — the _check headers, remaining form
# fields, and the final return) are missing from this dump.
454 def test_provisioning(self):
455 d = self.GET("/provisioning/")
457 self.failUnless('Provisioning Tool' in res)
458 fields = {'filled': True,
459 "num_users": int(50e3),
460 "files_per_user": 1000,
461 "space_per_user": int(1e9),
462 "sharing_ratio": 1.0,
463 "encoding_parameters": "3-of-10-5",
465 "ownership_mode": "A",
466 "download_rate": 100,
471 return self.POST("/provisioning/", **fields)
473 d.addCallback(_check)
475 self.failUnless('Provisioning Tool' in res)
476 self.failUnless("Share space consumed: 167.01TB" in res)
478 fields = {'filled': True,
479 "num_users": int(50e6),
480 "files_per_user": 1000,
481 "space_per_user": int(5e9),
482 "sharing_ratio": 1.0,
483 "encoding_parameters": "25-of-100-50",
484 "num_servers": 30000,
485 "ownership_mode": "E",
486 "drive_failure_model": "U",
488 "download_rate": 1000,
493 return self.POST("/provisioning/", **fields)
494 d.addCallback(_check2)
496 self.failUnless("Share space consumed: huge!" in res)
497 fields = {'filled': True}
498 return self.POST("/provisioning/", **fields)
499 d.addCallback(_check3)
501 self.failUnless("Share space consumed:" in res)
502 d.addCallback(_check4)
# Exercise the /reliability page (skipped when NumPy is unavailable):
# GET it, POST a parameter set, and check the rendered loss probability.
# NOTE(review): orig lines 506, 509-510 (the try/except around the
# reliability import), 512, 514, 517-520, 523-524, 526, 528 and 533 (the
# remaining form fields, _check headers and the return) are missing from
# this dump.
505 def test_reliability_tool(self):
507 from allmydata import reliability
508 _hush_pyflakes = reliability
511 raise unittest.SkipTest("reliability tool requires NumPy")
513 d = self.GET("/reliability/")
515 self.failUnless('Reliability Tool' in res)
516 fields = {'drive_lifetime': "8Y",
521 "check_period": "1M",
522 "report_period": "3M",
525 return self.POST("/reliability/", **fields)
527 d.addCallback(_check)
529 self.failUnless('Reliability Tool' in res)
530 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
531 self.failUnless(re.search(r, res), res)
532 d.addCallback(_check2)
# Walk the /status pages: the overview, the JSON view, and the per-
# operation pages (down/up/mapupdate/publish/retrieve), using the counters
# from the canned FakeHistory status objects. The down-N/event_json check
# matches the events installed by build_one_ds() above.
# NOTE(review): orig lines 543, 557, 559, 561, 584, and 599-600 (callback
# headers such as 'def _check(res):'/'def _check_dl(res):'/'def
# _check_ul(res):' and the final return) are missing from this dump.
535 def test_status(self):
536 h = self.s.get_history()
537 dl_num = h.list_all_download_statuses()[0].get_counter()
538 ul_num = h.list_all_upload_statuses()[0].get_counter()
539 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
540 pub_num = h.list_all_publish_statuses()[0].get_counter()
541 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
542 d = self.GET("/status", followRedirect=True)
544 self.failUnless('Upload and Download Status' in res, res)
545 self.failUnless('"down-%d"' % dl_num in res, res)
546 self.failUnless('"up-%d"' % ul_num in res, res)
547 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
548 self.failUnless('"publish-%d"' % pub_num in res, res)
549 self.failUnless('"retrieve-%d"' % ret_num in res, res)
550 d.addCallback(_check)
551 d.addCallback(lambda res: self.GET("/status/?t=json"))
552 def _check_json(res):
553 data = simplejson.loads(res)
554 self.failUnless(isinstance(data, dict))
555 #active = data["active"]
556 # TODO: test more. We need a way to fake an active operation
558 d.addCallback(_check_json)
560 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
562 self.failUnless("File Download Status" in res, res)
563 d.addCallback(_check_dl)
564 d.addCallback(lambda res: self.GET("/status/down-%d/event_json" % dl_num))
565 def _check_dl_json(res):
566 data = simplejson.loads(res)
567 self.failUnless(isinstance(data, dict))
568 self.failUnless("read" in data)
569 self.failUnlessEqual(data["read"][0]["length"], 120)
570 self.failUnlessEqual(data["segment"][0]["segment_length"], 100)
571 self.failUnlessEqual(data["segment"][2]["segment_number"], 2)
572 self.failUnlessEqual(data["segment"][2]["finish_time"], None)
573 phwr_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_a")[:20])
574 cmpu_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_b")[:20])
575 # serverids[] keys are strings, since that's what JSON does, but
576 # we'd really like them to be ints
577 self.failUnlessEqual(data["serverids"]["0"], "phwr")
578 self.failUnlessEqual(data["serverids"]["1"], "cmpu")
579 self.failUnlessEqual(data["server_info"][phwr_id]["short"], "phwr")
580 self.failUnlessEqual(data["server_info"][cmpu_id]["short"], "cmpu")
581 self.failUnless("dyhb" in data)
582 d.addCallback(_check_dl_json)
583 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
585 self.failUnless("File Upload Status" in res, res)
586 d.addCallback(_check_ul)
587 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
588 def _check_mapupdate(res):
589 self.failUnless("Mutable File Servermap Update Status" in res, res)
590 d.addCallback(_check_mapupdate)
591 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
592 def _check_publish(res):
593 self.failUnless("Mutable File Publish Status" in res, res)
594 d.addCallback(_check_publish)
595 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
596 def _check_retrieve(res):
597 self.failUnless("Mutable File Retrieve Status" in res, res)
598 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    """render_time/render_rate must format durations and transfer rates
    identically for the download and upload results renderers."""
    # The two renderer mixins share the same formatting contract, so run
    # the identical assertion set over each in turn (download first, then
    # upload, preserving the original check order).
    for renderer in (status.DownloadResultsRendererMixin(),
                     status.UploadResultsRendererMixin()):
        self.failUnlessReallyEqual(renderer.render_time(None, None), "")
        self.failUnlessReallyEqual(renderer.render_time(None, 2.5), "2.50s")
        self.failUnlessReallyEqual(renderer.render_time(None, 0.25), "250ms")
        self.failUnlessReallyEqual(renderer.render_time(None, 0.0021), "2.1ms")
        self.failUnlessReallyEqual(renderer.render_time(None, 0.000123), "123us")
        self.failUnlessReallyEqual(renderer.render_rate(None, None), "")
        self.failUnlessReallyEqual(renderer.render_rate(None, 2500000), "2.50MBps")
        self.failUnlessReallyEqual(renderer.render_rate(None, 30100), "30.1kBps")
        self.failUnlessReallyEqual(renderer.render_rate(None, 123), "123Bps")
# GET tests for /foo/bar.txt: plain fetch plus HTTP Range handling
# (closed range, open-ended range, suffix range, and an out-of-bounds
# range that must yield 416).
# NOTE(review): the tails of these tests (orig 628-629, 640-642, 654-656,
# 668-670, 677-678 — the d.addCallback(_got)/return d lines and the
# headers kwarg of the final shouldFail2) are missing from this dump.
625 def test_GET_FILEURL(self):
626 d = self.GET(self.public_url + "/foo/bar.txt")
627 d.addCallback(self.failUnlessIsBarDotTxt)
630 def test_GET_FILEURL_range(self):
631 headers = {"range": "bytes=1-10"}
632 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
633 return_response=True)
634 def _got((res, status, headers)):
635 self.failUnlessReallyEqual(int(status), 206)
636 self.failUnless(headers.has_key("content-range"))
637 self.failUnlessReallyEqual(headers["content-range"][0],
638 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
639 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
643 def test_GET_FILEURL_partial_range(self):
644 headers = {"range": "bytes=5-"}
645 length = len(self.BAR_CONTENTS)
646 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
647 return_response=True)
648 def _got((res, status, headers)):
649 self.failUnlessReallyEqual(int(status), 206)
650 self.failUnless(headers.has_key("content-range"))
651 self.failUnlessReallyEqual(headers["content-range"][0],
652 "bytes 5-%d/%d" % (length-1, length))
653 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
657 def test_GET_FILEURL_partial_end_range(self):
658 headers = {"range": "bytes=-5"}
659 length = len(self.BAR_CONTENTS)
660 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
661 return_response=True)
662 def _got((res, status, headers)):
663 self.failUnlessReallyEqual(int(status), 206)
664 self.failUnless(headers.has_key("content-range"))
665 self.failUnlessReallyEqual(headers["content-range"][0],
666 "bytes %d-%d/%d" % (length-5, length-1, length))
667 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
671 def test_GET_FILEURL_partial_range_overrun(self):
672 headers = {"range": "bytes=100-200"}
673 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
674 "416 Requested Range not satisfiable",
675 "First beyond end of file",
676 self.GET, self.public_url + "/foo/bar.txt",
# HEAD equivalents of the Range tests above (empty body, same Content-
# Range headers), plus a malformed Range header that must be ignored
# (plain 200 with the full body).
# NOTE(review): the tails of these tests (orig 690-692, 703-705, 716-718,
# 723, 725-726, 736-737 — addCallback/return lines and a shouldFail2
# argument) are missing from this dump.
680 def test_HEAD_FILEURL_range(self):
681 headers = {"range": "bytes=1-10"}
682 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
683 return_response=True)
684 def _got((res, status, headers)):
685 self.failUnlessReallyEqual(res, "")
686 self.failUnlessReallyEqual(int(status), 206)
687 self.failUnless(headers.has_key("content-range"))
688 self.failUnlessReallyEqual(headers["content-range"][0],
689 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
693 def test_HEAD_FILEURL_partial_range(self):
694 headers = {"range": "bytes=5-"}
695 length = len(self.BAR_CONTENTS)
696 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
697 return_response=True)
698 def _got((res, status, headers)):
699 self.failUnlessReallyEqual(int(status), 206)
700 self.failUnless(headers.has_key("content-range"))
701 self.failUnlessReallyEqual(headers["content-range"][0],
702 "bytes 5-%d/%d" % (length-1, length))
706 def test_HEAD_FILEURL_partial_end_range(self):
707 headers = {"range": "bytes=-5"}
708 length = len(self.BAR_CONTENTS)
709 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
710 return_response=True)
711 def _got((res, status, headers)):
712 self.failUnlessReallyEqual(int(status), 206)
713 self.failUnless(headers.has_key("content-range"))
714 self.failUnlessReallyEqual(headers["content-range"][0],
715 "bytes %d-%d/%d" % (length-5, length-1, length))
719 def test_HEAD_FILEURL_partial_range_overrun(self):
720 headers = {"range": "bytes=100-200"}
721 d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
722 "416 Requested Range not satisfiable",
724 self.HEAD, self.public_url + "/foo/bar.txt",
728 def test_GET_FILEURL_range_bad(self):
729 headers = {"range": "BOGUS=fizbop-quarnak"}
730 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
731 return_response=True)
732 def _got((res, status, headers)):
733 self.failUnlessReallyEqual(int(status), 200)
734 self.failUnless(not headers.has_key("content-range"))
735 self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
# HEAD of a file URL (headers only, no body) and the /file/ + /named/
# aliases: any trailing name segment should still serve bar.txt, and
# ?save=true should download it (Unicode filenames included).
# NOTE(review): the tails of these tests (orig 746-747, 768 — the
# addCallback/return lines) are missing from this dump.
739 def test_HEAD_FILEURL(self):
740 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
741 def _got((res, status, headers)):
742 self.failUnlessReallyEqual(res, "")
743 self.failUnlessReallyEqual(headers["content-length"][0],
744 str(len(self.BAR_CONTENTS)))
745 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
749 def test_GET_FILEURL_named(self):
750 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
751 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
752 d = self.GET(base + "/@@name=/blah.txt")
753 d.addCallback(self.failUnlessIsBarDotTxt)
754 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
755 d.addCallback(self.failUnlessIsBarDotTxt)
756 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
757 d.addCallback(self.failUnlessIsBarDotTxt)
758 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
759 d.addCallback(self.failUnlessIsBarDotTxt)
760 save_url = base + "?save=true&filename=blah.txt"
761 d.addCallback(lambda res: self.GET(save_url))
762 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
763 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
764 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
765 u_url = base + "?save=true&filename=" + u_fn_e
766 d.addCallback(lambda res: self.GET(u_url))
767 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
# Error-path tests for /file and /uri: wrong HTTP method on /file,
# dir-caps and verify-caps where file-caps are expected, bare /file URLs,
# and bogus child names under a file cap.
# NOTE(review): scattered lines are missing from this dump (orig 773,
# 776-777, 781-782, 784-785, 788, 790-792, 800-802, 809, 811-813, 816,
# 818-819, 825-827, 833-834 — mostly expected-status strings, the GET/PUT
# callables at the end of shouldFail2 calls, and the 'return d' lines).
770 def test_PUT_FILEURL_named_bad(self):
771 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
772 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
774 "/file can only be used with GET or HEAD",
775 self.PUT, base + "/@@name=/blah.txt", "")
778 def test_GET_DIRURL_named_bad(self):
779 base = "/file/%s" % urllib.quote(self._foo_uri)
780 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
783 self.GET, base + "/@@name=/blah.txt")
786 def test_GET_slash_file_bad(self):
787 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
789 "/file must be followed by a file-cap and a name",
793 def test_GET_unhandled_URI_named(self):
794 contents, n, newuri = self.makefile(12)
795 verifier_cap = n.get_verify_cap().to_string()
796 base = "/file/%s" % urllib.quote(verifier_cap)
797 # client.create_node_from_uri() can't handle verify-caps
798 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
799 "400 Bad Request", "is not a file-cap",
803 def test_GET_unhandled_URI(self):
804 contents, n, newuri = self.makefile(12)
805 verifier_cap = n.get_verify_cap().to_string()
806 base = "/uri/%s" % urllib.quote(verifier_cap)
807 # client.create_node_from_uri() can't handle verify-caps
808 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
810 "GET unknown URI type: can only do t=info",
814 def test_GET_FILE_URI(self):
815 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
817 d.addCallback(self.failUnlessIsBarDotTxt)
820 def test_GET_FILE_URI_badchild(self):
821 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
822 errmsg = "Files have no children, certainly not named 'boguschild'"
823 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
824 "400 Bad Request", errmsg,
828 def test_PUT_FILE_URI_badchild(self):
829 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
830 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
831 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
832 "400 Bad Request", errmsg,
# ?save=true must set a Content-Disposition attachment header; a missing
# child must 404; and PUT with replace=only-files may overwrite files but
# must refuse to overwrite a directory (409 Conflict).
# NOTE(review): the tails of these tests (orig 844-846, 850, 860, 863,
# 867, 869-870 — addCallback/return lines, the filecap argument and the
# closing shouldFail2 arguments) are missing from this dump.
836 # TODO: version of this with a Unicode filename
837 def test_GET_FILEURL_save(self):
838 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
839 return_response=True)
840 def _got((res, statuscode, headers)):
841 content_disposition = headers["content-disposition"][0]
842 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
843 self.failUnlessIsBarDotTxt(res)
847 def test_GET_FILEURL_missing(self):
848 d = self.GET(self.public_url + "/foo/missing")
849 d.addBoth(self.should404, "test_GET_FILEURL_missing")
852 def test_PUT_overwrite_only_files(self):
853 # create a directory, put a file in that directory.
854 contents, n, filecap = self.makefile(8)
855 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
856 d.addCallback(lambda res:
857 self.PUT(self.public_url + "/foo/dir/file1.txt",
858 self.NEWFILE_CONTENTS))
859 # try to overwrite the file with replace=only-files
861 d.addCallback(lambda res:
862 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
864 d.addCallback(lambda res:
865 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
866 "There was already a child by that name, and you asked me "
868 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
# PUT of a new file under /foo: plain, with mutable=false, and with a
# Content-Range header (which must be rejected with 501 and leave no
# child behind).
# NOTE(review): the 'return d' tails of these tests (orig 880, 891,
# 901 — including the headers kwarg of the range_bad shouldFail2 — and
# 904) are missing from this dump.
872 def test_PUT_NEWFILEURL(self):
873 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
874 # TODO: we lose the response code, so we can't check this
875 #self.failUnlessReallyEqual(responsecode, 201)
876 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
877 d.addCallback(lambda res:
878 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
879 self.NEWFILE_CONTENTS))
882 def test_PUT_NEWFILEURL_not_mutable(self):
883 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
884 self.NEWFILE_CONTENTS)
885 # TODO: we lose the response code, so we can't check this
886 #self.failUnlessReallyEqual(responsecode, 201)
887 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
888 d.addCallback(lambda res:
889 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
890 self.NEWFILE_CONTENTS))
893 def test_PUT_NEWFILEURL_range_bad(self):
894 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
895 target = self.public_url + "/foo/new.txt"
896 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
897 "501 Not Implemented",
898 "Content-Range in PUT not yet supported",
899 # (and certainly not for immutable files)
900 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
902 d.addCallback(lambda res:
903 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
# More PUT-new-file cases: mutable=true (cap must be a writable mutable
# filenode cap), an over-limit mutable body (413), overwrite/replace
# semantics, a bogus t= value (400), replace=false on an existing child,
# implicit mkdirs for intermediate path components, and a file blocking a
# directory creation.
# NOTE(review): scattered lines are missing from this dump (orig 911,
# 915 — the _check_uri header and its return — 920, 922, 928, 931, 941,
# 947-948, 954, 956-957, 961, 968, 974, 976 — continuation arguments and
# the 'return d' tails).
906 def test_PUT_NEWFILEURL_mutable(self):
907 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
908 self.NEWFILE_CONTENTS)
909 # TODO: we lose the response code, so we can't check this
910 #self.failUnlessReallyEqual(responsecode, 201)
912 u = uri.from_string_mutable_filenode(res)
913 self.failUnless(u.is_mutable())
914 self.failIf(u.is_readonly())
916 d.addCallback(_check_uri)
917 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
918 d.addCallback(lambda res:
919 self.failUnlessMutableChildContentsAre(self._foo_node,
921 self.NEWFILE_CONTENTS))
924 def test_PUT_NEWFILEURL_mutable_toobig(self):
925 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
926 "413 Request Entity Too Large",
927 "SDMF is limited to one segment, and 10001 > 10000",
929 self.public_url + "/foo/new.txt?mutable=true",
930 "b" * (self.s.MUTABLE_SIZELIMIT+1))
933 def test_PUT_NEWFILEURL_replace(self):
934 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
935 # TODO: we lose the response code, so we can't check this
936 #self.failUnlessReallyEqual(responsecode, 200)
937 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
938 d.addCallback(lambda res:
939 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
940 self.NEWFILE_CONTENTS))
943 def test_PUT_NEWFILEURL_bad_t(self):
944 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
945 "PUT to a file: bad t=bogus",
946 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
950 def test_PUT_NEWFILEURL_no_replace(self):
951 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
952 self.NEWFILE_CONTENTS)
953 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
955 "There was already a child by that name, and you asked me "
959 def test_PUT_NEWFILEURL_mkdirs(self):
960 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
962 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
963 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
964 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
965 d.addCallback(lambda res:
966 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
967 self.NEWFILE_CONTENTS))
970 def test_PUT_NEWFILEURL_blocked(self):
971 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
972 self.NEWFILE_CONTENTS)
973 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
975 "Unable to create directory 'blockingfile': a file was in the way")
978 def test_PUT_NEWFILEURL_emptyname(self):
979 # an empty pathname component (i.e. a double-slash) is disallowed
980 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
982 "The webapi does not allow empty pathname components",
983 self.PUT, self.public_url + "/foo//new.txt", "")
# NOTE(review): lossy extraction — stray line-number prefixes, `return d`
# lines elided. Code byte-identical; comments only.
# DELETE of an existing file removes the directory entry.
986 def test_DELETE_FILEURL(self):
987 d = self.DELETE(self.public_url + "/foo/bar.txt")
988 d.addCallback(lambda res:
989 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
# DELETE of a missing child / missing parent both yield 404.
992 def test_DELETE_FILEURL_missing(self):
993 d = self.DELETE(self.public_url + "/foo/missing")
994 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
997 def test_DELETE_FILEURL_missing2(self):
998 d = self.DELETE(self.public_url + "/missing/missing")
999 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
# Helper: assert the t=json response for bar.txt carries the tahoe link
# metadata (linkcrtime/linkmotime) recorded when the child was attached.
1002 def failUnlessHasBarDotTxtMetadata(self, res):
1003 data = simplejson.loads(res)
1004 self.failUnless(isinstance(data, list))
1005 self.failUnlessIn("metadata", data[1])
1006 self.failUnlessIn("tahoe", data[1]["metadata"])
1007 self.failUnlessIn("linkcrtime", data[1]["metadata"]["tahoe"])
1008 self.failUnlessIn("linkmotime", data[1]["metadata"]["tahoe"])
1009 self.failUnlessReallyEqual(data[1]["metadata"]["tahoe"]["linkcrtime"],
1010 self._bar_txt_metadata["tahoe"]["linkcrtime"])
# NOTE(review): lossy extraction — stray line-number prefixes; inner
# `def _check*(res):` headers and `return d` lines elided. Comments only.
1012 def test_GET_FILEURL_json(self):
1013 # twisted.web.http.parse_qs ignores any query args without an '=', so
1014 # I can't do "GET /path?json", I have to do "GET /path/t=json"
1015 # instead. This may make it tricky to emulate the S3 interface
1017 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
1019 self.failUnlessIsBarJSON(data)
1020 self.failUnlessHasBarDotTxtMetadata(data)
1022 d.addCallback(_check1)
# ?json (without '=') is ignored by the query parser, so this is a plain
# GET of a missing child: 404.
1025 def test_GET_FILEURL_json_missing(self):
1026 d = self.GET(self.public_url + "/foo/missing?json")
1027 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
# t=uri and t=readonly-uri are identical for immutable files.
1030 def test_GET_FILEURL_uri(self):
1031 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
1033 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1034 d.addCallback(_check)
1035 d.addCallback(lambda res:
1036 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
1038 # for now, for files, uris and readonly-uris are the same
1039 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1040 d.addCallback(_check2)
# Unknown t= on GET of a file is a 400.
1043 def test_GET_FILEURL_badtype(self):
1044 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
1047 self.public_url + "/foo/bar.txt?t=bogus")
# Smoke-test that the served stylesheet contains the toolbar rules.
1050 def test_CSS_FILE(self):
1051 d = self.GET("/tahoe_css", followRedirect=True)
1053 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
1054 self.failUnless(CSS_STYLE.search(res), res)
1055 d.addCallback(_check)
1058 def test_GET_FILEURL_uri_missing(self):
1059 d = self.GET(self.public_url + "/foo/missing?t=uri")
1060 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
# The directory listing page must carry the relative "Return to Welcome
# page" banner link.
1063 def test_GET_DIRECTORY_html_banner(self):
1064 d = self.GET(self.public_url + "/foo", followRedirect=True)
1066 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>',res)
1067 d.addCallback(_check)
# NOTE(review): lossy extraction — stray line-number prefixes; several
# inner `def _check*(res):` headers, an `else:` for the for-loop, and the
# trailing `return d` were elided. Code byte-identical; comments only.
# End-to-end check of the rendered HTML directory listing: welcome-page
# link, FILE row for bar.txt, relative delete-form action, DIR row for
# "sub", then read-only / empty / literal directory variants.
1070 def test_GET_DIRURL(self):
1071 # the addSlash means we get a redirect here
1072 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
1074 d = self.GET(self.public_url + "/foo", followRedirect=True)
1076 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
1078 # the FILE reference points to a URI, but it should end in bar.txt
1079 bar_url = ("%s/file/%s/@@named=/bar.txt" %
1080 (ROOT, urllib.quote(self._bar_txt_uri)))
1081 get_bar = "".join([r'<td>FILE</td>',
1083 r'<a href="%s">bar.txt</a>' % bar_url,
1085 r'\s+<td align="right">%d</td>' % len(self.BAR_CONTENTS),
1087 self.failUnless(re.search(get_bar, res), res)
1088 for line in res.split("\n"):
1089 # find the line that contains the delete button for bar.txt
1090 if ("form action" in line and
1091 'value="delete"' in line and
1092 'value="bar.txt"' in line):
1093 # the form target should use a relative URL
1094 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1095 self.failUnless(('action="%s"' % foo_url) in line, line)
1096 # and the when_done= should too
1097 #done_url = urllib.quote(???)
1098 #self.failUnless(('name="when_done" value="%s"' % done_url)
# NOTE(review): the `break`/`else:` between the match and this fail()
# was elided in extraction.
1102 self.fail("unable to find delete-bar.txt line", res)
1104 # the DIR reference just points to a URI
1105 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1106 get_sub = ((r'<td>DIR</td>')
1107 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
1108 self.failUnless(re.search(get_sub, res), res)
1109 d.addCallback(_check)
1111 # look at a readonly directory
1112 d.addCallback(lambda res:
1113 self.GET(self.public_url + "/reedownlee", followRedirect=True))
1115 self.failUnless("(read-only)" in res, res)
1116 self.failIf("Upload a file" in res, res)
1117 d.addCallback(_check2)
1119 # and at a directory that contains a readonly directory
1120 d.addCallback(lambda res:
1121 self.GET(self.public_url, followRedirect=True))
1123 self.failUnless(re.search('<td>DIR-RO</td>'
1124 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1125 d.addCallback(_check3)
1127 # and an empty directory
1128 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1130 self.failUnless("directory is empty" in res, res)
1131 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1132 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1133 d.addCallback(_check4)
1135 # and at a literal directory
1136 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1137 d.addCallback(lambda res:
1138 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1140 self.failUnless('(immutable)' in res, res)
1141 self.failUnless(re.search('<td>FILE</td>'
1142 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1143 d.addCallback(_check5)
# NOTE(review): lossy extraction — prefixes and `return d` elided.
# Unknown t= on a directory GET is a 400; t=json round-trips the dirnode.
1146 def test_GET_DIRURL_badtype(self):
1147 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1151 self.public_url + "/foo?t=bogus")
1154 def test_GET_DIRURL_json(self):
1155 d = self.GET(self.public_url + "/foo?t=json")
1156 d.addCallback(self.failUnlessIsFooJSON)
# NOTE(review): lossy extraction — stray prefixes; inner helper headers
# (`def _got_json(res):` etc.) and `return d` lines elided. Comments only.
# start-manifest is a slow operation and requires an ophandle.
1160 def test_POST_DIRURL_manifest_no_ophandle(self):
1161 d = self.shouldFail2(error.Error,
1162 "test_POST_DIRURL_manifest_no_ophandle",
1164 "slow operation requires ophandle=",
1165 self.POST, self.POST, self.public_url, t="start-manifest") if False else None
# NOTE(review): the line above is reproduced below verbatim; disregard.
1165 self.POST, self.public_url, t="start-manifest")
# Run start-manifest under ophandle=125 and check the html, text and JSON
# renderings of the operation results.
1168 def test_POST_DIRURL_manifest(self):
1169 d = defer.succeed(None)
1170 def getman(ignored, output):
1171 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1172 followRedirect=True)
1173 d.addCallback(self.wait_for_operation, "125")
1174 d.addCallback(self.get_operation_results, "125", output)
1176 d.addCallback(getman, None)
1177 def _got_html(manifest):
1178 self.failUnless("Manifest of SI=" in manifest)
1179 self.failUnless("<td>sub</td>" in manifest)
1180 self.failUnless(self._sub_uri in manifest)
1181 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1182 d.addCallback(_got_html)
1184 # both t=status and unadorned GET should be identical
1185 d.addCallback(lambda res: self.GET("/operations/125"))
1186 d.addCallback(_got_html)
1188 d.addCallback(getman, "html")
1189 d.addCallback(_got_html)
1190 d.addCallback(getman, "text")
1191 def _got_text(manifest):
1192 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1193 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1194 d.addCallback(_got_text)
1195 d.addCallback(getman, "JSON")
# NOTE(review): the `def _got_json(res):` header was elided before this body.
1197 data = res["manifest"]
1199 for (path_list, cap) in data:
1200 got[tuple(path_list)] = cap
1201 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1202 self.failUnless((u"sub",u"baz.txt") in got)
1203 self.failUnless("finished" in res)
1204 self.failUnless("origin" in res)
1205 self.failUnless("storage-index" in res)
1206 self.failUnless("verifycaps" in res)
1207 self.failUnless("stats" in res)
1208 d.addCallback(_got_json)
# NOTE(review): lossy extraction — stray prefixes; inner helper headers and
# `return d` lines elided. Code byte-identical; comments only.
# start-deep-size / start-deep-stats are slow operations: ophandle required.
1211 def test_POST_DIRURL_deepsize_no_ophandle(self):
1212 d = self.shouldFail2(error.Error,
1213 "test_POST_DIRURL_deepsize_no_ophandle",
1215 "slow operation requires ophandle=",
1216 self.POST, self.public_url, t="start-deep-size")
# deep-size under ophandle=126: check both JSON and text renderings report
# a plausible aggregate size (> 1000 bytes; exact size varies with dirnode
# serialization).
1219 def test_POST_DIRURL_deepsize(self):
1220 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1221 followRedirect=True)
1222 d.addCallback(self.wait_for_operation, "126")
1223 d.addCallback(self.get_operation_results, "126", "json")
1224 def _got_json(data):
1225 self.failUnlessReallyEqual(data["finished"], True)
# NOTE(review): the line binding `size` from the JSON was elided here.
1227 self.failUnless(size > 1000)
1228 d.addCallback(_got_json)
1229 d.addCallback(self.get_operation_results, "126", "text")
# NOTE(review): the `def _got_text(res):` header was elided before this body.
1231 mo = re.search(r'^size: (\d+)$', res, re.M)
1232 self.failUnless(mo, res)
1233 size = int(mo.group(1))
1234 # with directories, the size varies.
1235 self.failUnless(size > 1000)
1236 d.addCallback(_got_text)
1239 def test_POST_DIRURL_deepstats_no_ophandle(self):
1240 d = self.shouldFail2(error.Error,
1241 "test_POST_DIRURL_deepstats_no_ophandle",
1243 "slow operation requires ophandle=",
1244 self.POST, self.public_url, t="start-deep-stats")
# deep-stats under ophandle=127: compare the stable counters against the
# known fixture tree (size-varying fields are deliberately commented out).
1247 def test_POST_DIRURL_deepstats(self):
1248 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1249 followRedirect=True)
1250 d.addCallback(self.wait_for_operation, "127")
1251 d.addCallback(self.get_operation_results, "127", "json")
1252 def _got_json(stats):
1253 expected = {"count-immutable-files": 3,
1254 "count-mutable-files": 0,
1255 "count-literal-files": 0,
1257 "count-directories": 3,
1258 "size-immutable-files": 57,
1259 "size-literal-files": 0,
1260 #"size-directories": 1912, # varies
1261 #"largest-directory": 1590,
1262 "largest-directory-children": 5,
1263 "largest-immutable-file": 19,
1265 for k,v in expected.iteritems():
1266 self.failUnlessReallyEqual(stats[k], v,
1267 "stats[%s] was %s, not %s" %
1269 self.failUnlessReallyEqual(stats["size-files-histogram"],
1271 d.addCallback(_got_json)
# NOTE(review): lossy extraction — stray prefixes; `def _check(res):`
# headers and `return d` lines elided. Code byte-identical; comments only.
# t=stream-manifest emits newline-separated JSON units, one per node plus a
# final "stats" unit; verify count, ordering, and per-unit cap fields.
1274 def test_POST_DIRURL_stream_manifest(self):
1275 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1277 self.failUnless(res.endswith("\n"))
1278 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1279 self.failUnlessReallyEqual(len(units), 7)
1280 self.failUnlessEqual(units[-1]["type"], "stats")
# NOTE(review): the line binding `first = units[0]` was elided here.
1282 self.failUnlessEqual(first["path"], [])
1283 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1284 self.failUnlessEqual(first["type"], "directory")
1285 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1286 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1287 self.failIfEqual(baz["storage-index"], None)
1288 self.failIfEqual(baz["verifycap"], None)
1289 self.failIfEqual(baz["repaircap"], None)
1291 d.addCallback(_check)
# t=uri / t=readonly-uri on a directory return the rw and ro dircaps.
1294 def test_GET_DIRURL_uri(self):
1295 d = self.GET(self.public_url + "/foo?t=uri")
1297 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1298 d.addCallback(_check)
1301 def test_GET_DIRURL_readonly_uri(self):
1302 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1304 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1305 d.addCallback(_check)
# NOTE(review): lossy extraction — stray prefixes, `return d` elided.
# t=mkdir via PUT and POST both create an empty child directory.
1308 def test_PUT_NEWDIRURL(self):
1309 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1310 d.addCallback(lambda res:
1311 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1312 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1313 d.addCallback(self.failUnlessNodeKeysAre, [])
1316 def test_POST_NEWDIRURL(self):
1317 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1318 d.addCallback(lambda res:
1319 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1320 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1321 d.addCallback(self.failUnlessNodeKeysAre, [])
1324 def test_POST_NEWDIRURL_emptyname(self):
1325 # an empty pathname component (i.e. a double-slash) is disallowed
1326 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_emptyname",
1328 "The webapi does not allow empty pathname components, i.e. a double slash",
1329 self.POST, self.public_url + "//?t=mkdir")
# NOTE(review): lossy extraction — stray prefixes; several continuation
# lines carrying cap arguments (e.g. caps['filecap1']) and the trailing
# `return d` were elided. Code byte-identical; comments only.
# t=mkdir-with-children: create a directory pre-populated from a JSON body
# and verify every child (immutable, mutable, unknown-cap, subdirectory)
# landed with the expected rw/ro caps.
1332 def test_POST_NEWDIRURL_initial_children(self):
1333 (newkids, caps) = self._create_initial_children()
1334 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-with-children",
1335 simplejson.dumps(newkids))
# NOTE(review): the `def _check(uri):` header was elided before this body.
1337 n = self.s.create_node_from_uri(uri.strip())
1338 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1339 d2.addCallback(lambda ign:
1340 self.failUnlessROChildURIIs(n, u"child-imm",
1342 d2.addCallback(lambda ign:
1343 self.failUnlessRWChildURIIs(n, u"child-mutable",
1345 d2.addCallback(lambda ign:
1346 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1348 d2.addCallback(lambda ign:
1349 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1350 caps['unknown_rocap']))
1351 d2.addCallback(lambda ign:
1352 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1353 caps['unknown_rwcap']))
1354 d2.addCallback(lambda ign:
1355 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1356 caps['unknown_immcap']))
1357 d2.addCallback(lambda ign:
1358 self.failUnlessRWChildURIIs(n, u"dirchild",
1360 d2.addCallback(lambda ign:
1361 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1363 d2.addCallback(lambda ign:
1364 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1365 caps['emptydircap']))
1367 d.addCallback(_check)
1368 d.addCallback(lambda res:
1369 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1370 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1371 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1372 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1373 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# NOTE(review): lossy extraction — stray prefixes; some cap-argument
# continuation lines and `return d` elided. Code byte-identical; comments only.
# t=mkdir-immutable: only immutable/LIT children are allowed; verify each
# child is read-only with the expected cap, then re-check via _foo_node.
1376 def test_POST_NEWDIRURL_immutable(self):
1377 (newkids, caps) = self._create_immutable_children()
1378 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1379 simplejson.dumps(newkids))
# NOTE(review): the `def _check(uri):` header was elided before this body.
1381 n = self.s.create_node_from_uri(uri.strip())
1382 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1383 d2.addCallback(lambda ign:
1384 self.failUnlessROChildURIIs(n, u"child-imm",
1386 d2.addCallback(lambda ign:
1387 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1388 caps['unknown_immcap']))
1389 d2.addCallback(lambda ign:
1390 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1392 d2.addCallback(lambda ign:
1393 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1395 d2.addCallback(lambda ign:
1396 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1397 caps['emptydircap']))
1399 d.addCallback(_check)
1400 d.addCallback(lambda res:
1401 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1402 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1403 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1404 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1405 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1406 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1407 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1408 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1409 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1410 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1411 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1412 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1413 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1414 d.addErrback(self.explain_web_error)
# Mutable children in a mkdir-immutable request must be rejected (400-class
# "needed to be immutable but was not").
1417 def test_POST_NEWDIRURL_immutable_bad(self):
1418 (newkids, caps) = self._create_initial_children()
1419 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1421 "needed to be immutable but was not",
1423 self.public_url + "/foo/newdir?t=mkdir-immutable",
1424 simplejson.dumps(newkids))
# NOTE(review): lossy extraction — stray prefixes; some inner lines and
# `return d` elided. Code byte-identical; comments only.
# t=mkdir on an existing directory is a no-op that preserves its children.
1427 def test_PUT_NEWDIRURL_exists(self):
1428 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1429 d.addCallback(lambda res:
1430 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1431 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1432 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# mkdir through a path blocked by a file must 409 and leave "sub" intact.
1435 def test_PUT_NEWDIRURL_blocked(self):
1436 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1437 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1439 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1440 d.addCallback(lambda res:
1441 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1442 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1443 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p creates intermediate directories and is idempotent: the second
# call must return the same dircap.
1446 def test_PUT_NEWDIRURL_mkdir_p(self):
1447 d = defer.succeed(None)
1448 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1449 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1450 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1451 def mkdir_p(mkpnode):
1452 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
# NOTE(review): the `d = self.POST(url)` line was elided here.
1454 def made_subsub(ssuri):
1455 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1456 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
# NOTE(review): the repeated-POST line (idempotency probe) was elided here.
1458 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1460 d.addCallback(made_subsub)
1462 d.addCallback(mkdir_p)
# mkdir with intermediate components creates them all.
1465 def test_PUT_NEWDIRURL_mkdirs(self):
1466 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1467 d.addCallback(lambda res:
1468 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1469 d.addCallback(lambda res:
1470 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1471 d.addCallback(lambda res:
1472 self._foo_node.get_child_at_path(u"subdir/newdir"))
1473 d.addCallback(self.failUnlessNodeKeysAre, [])
# DELETE of a directory unlinks it; missing targets 404.
1476 def test_DELETE_DIRURL(self):
1477 d = self.DELETE(self.public_url + "/foo")
1478 d.addCallback(lambda res:
1479 self.failIfNodeHasChild(self.public_root, u"foo"))
1482 def test_DELETE_DIRURL_missing(self):
1483 d = self.DELETE(self.public_url + "/foo/missing")
1484 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1485 d.addCallback(lambda res:
1486 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1489 def test_DELETE_DIRURL_missing2(self):
1490 d = self.DELETE(self.public_url + "/missing")
1491 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# NOTE(review): lossy extraction — stray prefixes; each helper's
# `d = node.list()`-style setup line and trailing `return d` were elided.
# Code byte-identical; comments only.
# Debug helper: walk the public root printing each child path.
1494 def dump_root(self):
1496 w = webish.DirnodeWalkerMixin()
1497 def visitor(childpath, childnode, metadata):
1499 d = w.walk(self.public_root, visitor)
# Assert that a dirnode's children are exactly expected_keys (order-free).
1502 def failUnlessNodeKeysAre(self, node, expected_keys):
1503 for k in expected_keys:
1504 assert isinstance(k, unicode)
1506 def _check(children):
1507 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1508 d.addCallback(_check)
# Assert presence / absence of a single named child.
1510 def failUnlessNodeHasChild(self, node, name):
1511 assert isinstance(name, unicode)
1513 def _check(children):
1514 self.failUnless(name in children)
1515 d.addCallback(_check)
1517 def failIfNodeHasChild(self, node, name):
1518 assert isinstance(name, unicode)
1520 def _check(children):
1521 self.failIf(name in children)
1522 d.addCallback(_check)
# Download an immutable child at `name` and compare its bytes.
1525 def failUnlessChildContentsAre(self, node, name, expected_contents):
1526 assert isinstance(name, unicode)
1527 d = node.get_child_at_path(name)
1528 d.addCallback(lambda node: download_to_data(node))
1529 def _check(contents):
1530 self.failUnlessReallyEqual(contents, expected_contents)
1531 d.addCallback(_check)
# Same, but via download_best_version() for mutable children.
1534 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1535 assert isinstance(name, unicode)
1536 d = node.get_child_at_path(name)
1537 d.addCallback(lambda node: node.download_best_version())
1538 def _check(contents):
1539 self.failUnlessReallyEqual(contents, expected_contents)
1540 d.addCallback(_check)
# Assert the child at `name` is writeable (or unknown) and its rw/ro caps
# match expected_uri (whitespace-stripped).
1543 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1544 assert isinstance(name, unicode)
1545 d = node.get_child_at_path(name)
1547 self.failUnless(child.is_unknown() or not child.is_readonly())
1548 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1549 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1550 expected_ro_uri = self._make_readonly(expected_uri)
1552 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1553 d.addCallback(_check)
# Assert the child is read-only (no write cap) with the expected cap.
1556 def failUnlessROChildURIIs(self, node, name, expected_uri):
1557 assert isinstance(name, unicode)
1558 d = node.get_child_at_path(name)
1560 self.failUnless(child.is_unknown() or child.is_readonly())
1561 self.failUnlessReallyEqual(child.get_write_uri(), None)
1562 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1563 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1564 d.addCallback(_check)
# Inverse direction: assert a cap returned by the webapi matches the child
# actually linked under `name` (rw variant, then ro variant).
1567 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1568 assert isinstance(name, unicode)
1569 d = node.get_child_at_path(name)
1571 self.failUnless(child.is_unknown() or not child.is_readonly())
1572 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1573 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1574 expected_ro_uri = self._make_readonly(got_uri)
1576 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1577 d.addCallback(_check)
1580 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1581 assert isinstance(name, unicode)
1582 d = node.get_child_at_path(name)
1584 self.failUnless(child.is_unknown() or child.is_readonly())
1585 self.failUnlessReallyEqual(child.get_write_uri(), None)
1586 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1587 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1588 d.addCallback(_check)
# Check a CHK cap's contents against the fake-node shared store.
1591 def failUnlessCHKURIHasContents(self, got_uri, contents):
1592 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
# NOTE(review): lossy extraction — stray prefixes; `fn = self._foo_node`
# bindings and `return d` lines elided. Code byte-identical; comments only.
# t=upload form POST into a directory; also exercises a unicode filename
# (taken from the multipart part name, or overridden via name=).
1594 def test_POST_upload(self):
1595 d = self.POST(self.public_url + "/foo", t="upload",
1596 file=("new.txt", self.NEWFILE_CONTENTS))
1598 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1599 d.addCallback(lambda res:
1600 self.failUnlessChildContentsAre(fn, u"new.txt",
1601 self.NEWFILE_CONTENTS))
1604 def test_POST_upload_unicode(self):
1605 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1606 d = self.POST(self.public_url + "/foo", t="upload",
1607 file=(filename, self.NEWFILE_CONTENTS))
1609 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1610 d.addCallback(lambda res:
1611 self.failUnlessChildContentsAre(fn, filename,
1612 self.NEWFILE_CONTENTS))
1613 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1614 d.addCallback(lambda res: self.GET(target_url))
1615 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1616 self.NEWFILE_CONTENTS,
# name= overrides the filename supplied in the file part.
1620 def test_POST_upload_unicode_named(self):
1621 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1622 d = self.POST(self.public_url + "/foo", t="upload",
1624 file=("overridden", self.NEWFILE_CONTENTS))
1626 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
1627 d.addCallback(lambda res:
1628 self.failUnlessChildContentsAre(fn, filename,
1629 self.NEWFILE_CONTENTS))
1630 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
1631 d.addCallback(lambda res: self.GET(target_url))
1632 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
1633 self.NEWFILE_CONTENTS,
# POST /uri?t=upload (no directory link): response is an upload-results
# page carrying the new cap; verify the stored contents through it.
1637 def test_POST_upload_no_link(self):
1638 d = self.POST("/uri", t="upload",
1639 file=("new.txt", self.NEWFILE_CONTENTS))
1640 def _check_upload_results(page):
1641 # this should be a page which describes the results of the upload
1642 # that just finished.
1643 self.failUnless("Upload Results:" in page)
1644 self.failUnless("URI:" in page)
1645 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1646 mo = uri_re.search(page)
1647 self.failUnless(mo, page)
1648 new_uri = mo.group(1)
1650 d.addCallback(_check_upload_results)
1651 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done= should redirect after the unlinked upload.
1654 def test_POST_upload_no_link_whendone(self):
1655 d = self.POST("/uri", t="upload", when_done="/",
1656 file=("new.txt", self.NEWFILE_CONTENTS))
1657 d.addBoth(self.shouldRedirect, "/")
# NOTE(review): lossy extraction — stray prefixes; inner `def _check*`
# headers and `return d` lines elided. Code byte-identical; comments only.
# Helper: invoke `callable`, expect a PageRedirect failure, and hand the
# status code and Location target to `checker`. Fails if no redirect.
1660 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1661 d = defer.maybeDeferred(callable, *args, **kwargs)
1663 if isinstance(res, failure.Failure):
1664 res.trap(error.PageRedirect)
1665 statuscode = res.value.status
1666 target = res.value.location
1667 return checker(statuscode, target)
1668 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done="/uri/%(uri)s" must interpolate the new filecap into the
# redirect target; following it retrieves the uploaded contents.
1673 def test_POST_upload_no_link_whendone_results(self):
1674 def check(statuscode, target):
1675 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
1676 self.failUnless(target.startswith(self.webish_url), target)
1677 return client.getPage(target, method="GET")
1678 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1680 self.POST, "/uri", t="upload",
1681 when_done="/uri/%(uri)s",
1682 file=("new.txt", self.NEWFILE_CONTENTS))
1683 d.addCallback(lambda res:
1684 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))
# Unlinked mutable upload: response body is the SSK writecap; verify the
# contents are reachable via /uri/<cap> and /file/<cap>.
1687 def test_POST_upload_no_link_mutable(self):
1688 d = self.POST("/uri", t="upload", mutable="true",
1689 file=("new.txt", self.NEWFILE_CONTENTS))
1690 def _check(filecap):
1691 filecap = filecap.strip()
1692 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
1693 self.filecap = filecap
1694 u = uri.WriteableSSKFileURI.init_from_string(filecap)
1695 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
1696 n = self.s.create_node_from_uri(filecap)
1697 return n.download_best_version()
1698 d.addCallback(_check)
1700 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1701 return self.GET("/uri/%s" % urllib.quote(self.filecap))
1702 d.addCallback(_check2)
1704 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1705 return self.GET("/file/%s" % urllib.quote(self.filecap))
1706 d.addCallback(_check3)
1708 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
1709 d.addCallback(_check4)
# Oversized unlinked SDMF upload must 413.
1712 def test_POST_upload_no_link_mutable_toobig(self):
1713 d = self.shouldFail2(error.Error,
1714 "test_POST_upload_no_link_mutable_toobig",
1715 "413 Request Entity Too Large",
1716 "SDMF is limited to one segment, and 10001 > 10000",
1718 "/uri", t="upload", mutable="true",
1720 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
# NOTE(review): lossy extraction — stray prefixes; `fn = self._foo_node`
# binding, several inner `def _got*` headers, and the trailing error-check
# lines were elided. Code byte-identical; comments only.
# Full lifecycle of a linked mutable file: create via POST t=upload
# mutable=true, overwrite via POST and PUT (cap must stay stable), then
# inspect HTML/JSON directory listings, t=uri / t=readonly-uri, /uri/<cap>
# retrieval, HEAD sizing, and the oversized-overwrite error.
1723 def test_POST_upload_mutable(self):
1724 # this creates a mutable file
1725 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
1726 file=("new.txt", self.NEWFILE_CONTENTS))
1728 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1729 d.addCallback(lambda res:
1730 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1731 self.NEWFILE_CONTENTS))
1732 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
# NOTE(review): `def _got(newnode):` header elided before this body.
1734 self.failUnless(IMutableFileNode.providedBy(newnode))
1735 self.failUnless(newnode.is_mutable())
1736 self.failIf(newnode.is_readonly())
1737 self._mutable_node = newnode
1738 self._mutable_uri = newnode.get_uri()
1741 # now upload it again and make sure that the URI doesn't change
1742 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
1743 d.addCallback(lambda res:
1744 self.POST(self.public_url + "/foo", t="upload",
1746 file=("new.txt", NEWER_CONTENTS)))
1747 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1748 d.addCallback(lambda res:
1749 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1751 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
# NOTE(review): `def _got2(newnode):` header elided before this body.
1753 self.failUnless(IMutableFileNode.providedBy(newnode))
1754 self.failUnless(newnode.is_mutable())
1755 self.failIf(newnode.is_readonly())
1756 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1757 d.addCallback(_got2)
1759 # upload a second time, using PUT instead of POST
1760 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
1761 d.addCallback(lambda res:
1762 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
1763 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
1764 d.addCallback(lambda res:
1765 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
1768 # finally list the directory, since mutable files are displayed
1769 # slightly differently
1771 d.addCallback(lambda res:
1772 self.GET(self.public_url + "/foo/",
1773 followRedirect=True))
1774 def _check_page(res):
1775 # TODO: assert more about the contents
1776 self.failUnless("SSK" in res)
1778 d.addCallback(_check_page)
1780 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
# NOTE(review): `def _got3(newnode):` header elided before this body.
1782 self.failUnless(IMutableFileNode.providedBy(newnode))
1783 self.failUnless(newnode.is_mutable())
1784 self.failIf(newnode.is_readonly())
1785 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
1786 d.addCallback(_got3)
1788 # look at the JSON form of the enclosing directory
1789 d.addCallback(lambda res:
1790 self.GET(self.public_url + "/foo/?t=json",
1791 followRedirect=True))
1792 def _check_page_json(res):
1793 parsed = simplejson.loads(res)
1794 self.failUnlessEqual(parsed[0], "dirnode")
1795 children = dict( [(unicode(name),value)
1797 in parsed[1]["children"].iteritems()] )
1798 self.failUnless(u"new.txt" in children)
1799 new_json = children[u"new.txt"]
1800 self.failUnlessEqual(new_json[0], "filenode")
1801 self.failUnless(new_json[1]["mutable"])
1802 self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
1803 ro_uri = self._mutable_node.get_readonly().to_string()
1804 self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
1805 d.addCallback(_check_page_json)
1807 # and the JSON form of the file
1808 d.addCallback(lambda res:
1809 self.GET(self.public_url + "/foo/new.txt?t=json"))
1810 def _check_file_json(res):
1811 parsed = simplejson.loads(res)
1812 self.failUnlessEqual(parsed[0], "filenode")
1813 self.failUnless(parsed[1]["mutable"])
1814 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
1815 ro_uri = self._mutable_node.get_readonly().to_string()
1816 self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
1817 d.addCallback(_check_file_json)
1819 # and look at t=uri and t=readonly-uri
1820 d.addCallback(lambda res:
1821 self.GET(self.public_url + "/foo/new.txt?t=uri"))
1822 d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
1823 d.addCallback(lambda res:
1824 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
1825 def _check_ro_uri(res):
1826 ro_uri = self._mutable_node.get_readonly().to_string()
1827 self.failUnlessReallyEqual(res, ro_uri)
1828 d.addCallback(_check_ro_uri)
1830 # make sure we can get to it from /uri/URI
1831 d.addCallback(lambda res:
1832 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
1833 d.addCallback(lambda res:
1834 self.failUnlessReallyEqual(res, NEW2_CONTENTS))
1836 # and that HEAD computes the size correctly
1837 d.addCallback(lambda res:
1838 self.HEAD(self.public_url + "/foo/new.txt",
1839 return_response=True))
1840 def _got_headers((res, status, headers)):
1841 self.failUnlessReallyEqual(res, "")
1842 self.failUnlessReallyEqual(headers["content-length"][0],
1843 str(len(NEW2_CONTENTS)))
1844 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
1845 d.addCallback(_got_headers)
1847 # make sure that size errors are displayed correctly for overwrite
1848 d.addCallback(lambda res:
1849 self.shouldFail2(error.Error,
1850 "test_POST_upload_mutable-toobig",
1851 "413 Request Entity Too Large",
1852 "SDMF is limited to one segment, and 10001 > 10000",
1854 self.public_url + "/foo", t="upload",
1857 "b" * (self.s.MUTABLE_SIZELIMIT+1)),
1860 d.addErrback(self.dump_error)
def test_POST_upload_mutable_toobig(self):
    """A mutable upload bigger than the SDMF one-segment limit must be
    rejected with 413 Request Entity Too Large."""
    # NOTE(review): the callable and file= argument lines were lost in
    # truncation and have been restored -- verify against upstream.
    d = self.shouldFail2(error.Error,
                         "test_POST_upload_mutable_toobig",
                         "413 Request Entity Too Large",
                         "SDMF is limited to one segment, and 10001 > 10000",
                         self.POST,
                         self.public_url + "/foo",
                         t="upload", mutable="true",
                         file=("new.txt",
                               "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    return d
1875 def dump_error(self, f):
1876 # if the web server returns an error code (like 400 Bad Request),
1877 # web.client.getPage puts the HTTP response body into the .response
1878 # attribute of the exception object that it gives back. It does not
1879 # appear in the Failure's repr(), so the ERROR that trial displays
1880 # will be rather terse and unhelpful. addErrback this method to the
1881 # end of your chain to get more information out of these errors.
1882 if f.check(error.Error):
1883 print "web.error.Error:"
1885 print f.value.response
def test_POST_upload_replace(self):
    """POST t=upload of an existing name replaces the child's contents."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    fn = self._foo_node  # restored: lost in truncation
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"bar.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_no_replace_ok(self):
    """replace=false is fine when the target name does not exist yet."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
    d.addCallback(lambda res: self.failUnlessReallyEqual(res,
                                                         self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_no_replace_queryarg(self):
    """?replace=false on an existing child must fail with 409 Conflict
    and leave the original contents untouched."""
    d = self.POST(self.public_url + "/foo?replace=false", t="upload",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error,
              "POST_upload_no_replace_queryarg",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_upload_no_replace_field(self):
    """replace=false as a form field (not query arg) must also 409."""
    d = self.POST(self.public_url + "/foo", t="upload", replace="false",
                  file=("bar.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_upload_whendone(self):
    """when_done= makes the upload redirect to the given URL afterwards."""
    d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
                  file=("new.txt", self.NEWFILE_CONTENTS))
    d.addBoth(self.shouldRedirect, "/THERE")
    fn = self._foo_node  # restored: lost in truncation
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_named(self):
    """An explicit name= field overrides the uploaded file's filename."""
    fn = self._foo_node  # restored: lost in truncation
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="new.txt", file=self.NEWFILE_CONTENTS)
    d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(fn, u"new.txt",
                                                  self.NEWFILE_CONTENTS))
    return d
def test_POST_upload_named_badfilename(self):
    """name= containing a slash must fail with 400 Bad Request and
    leave the directory unmodified."""
    d = self.POST(self.public_url + "/foo", t="upload",
                  name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
    d.addBoth(self.shouldFail, error.Error,
              "test_POST_upload_named_badfilename",
              "400 Bad Request",
              "name= may not contain a slash",
              )
    # make sure that nothing was added
    d.addCallback(lambda res:
                  self.failUnlessNodeKeysAre(self._foo_node,
                                             [u"bar.txt", u"blockingfile",
                                              u"empty", u"n\u00fc.txt",
                                              u"sub"]))
    return d
def test_POST_FILEURL_check(self):
    """POST t=check on a file: HTML result, when_done redirect,
    return_to link, and JSON output."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check")
    def _check(res):
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    # NOTE(review): redirect arguments restored after truncation --
    # verify against upstream.
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check",
                                       _check2,
                                       self.POST, bar_url,
                                       t="check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)

    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)

    return d
def test_POST_FILEURL_check_and_repair(self):
    """POST t=check&repair=true on a healthy file behaves like t=check."""
    bar_url = self.public_url + "/foo/bar.txt"
    d = self.POST(bar_url, t="check", repair="true")
    def _check(res):
        self.failUnless("Healthy :" in res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    # NOTE(review): redirect arguments restored after truncation.
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                       _check2,
                                       self.POST, bar_url,
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(bar_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    return d
def test_POST_DIRURL_check(self):
    """POST t=check on a directory: HTML, redirect, return_to, JSON."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check")
    def _check(res):
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    # NOTE(review): redirect arguments restored after truncation.
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check",
                                       _check2,
                                       self.POST, foo_url,
                                       t="check",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res, res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)

    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", output="JSON"))
    def _check_json(res):
        data = simplejson.loads(res)
        self.failUnless("storage-index" in data)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_json)

    return d
def test_POST_DIRURL_check_and_repair(self):
    """POST t=check&repair=true on a healthy directory."""
    foo_url = self.public_url + "/foo/"
    d = self.POST(foo_url, t="check", repair="true")
    def _check(res):
        self.failUnless("Healthy :" in res, res)
    d.addCallback(_check)
    redir_url = "http://allmydata.org/TARGET"
    def _check2(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnlessReallyEqual(target, redir_url)
    # NOTE(review): redirect arguments restored after truncation.
    d.addCallback(lambda res:
                  self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                       _check2,
                                       self.POST, foo_url,
                                       t="check", repair="true",
                                       when_done=redir_url))
    d.addCallback(lambda res:
                  self.POST(foo_url, t="check", return_to=redir_url))
    def _check3(res):
        self.failUnless("Healthy :" in res)
        self.failUnless("Return to file/directory" in res)
        self.failUnless(redir_url in res)
    d.addCallback(_check3)
    return d
def wait_for_operation(self, ignored, ophandle):
    """Poll /operations/OPHANDLE?t=status until finished, then return
    the parsed JSON status. Re-polls every second via self.stall."""
    url = "/operations/" + ophandle
    url += "?t=status&output=JSON"
    # NOTE(review): GET/callback scaffolding restored after truncation.
    d = self.GET(url)
    def _got(res):
        data = simplejson.loads(res)
        if not data["finished"]:
            d = self.stall(delay=1.0)
            d.addCallback(self.wait_for_operation, ophandle)
            return d
        return data
    d.addCallback(_got)
    return d
def get_operation_results(self, ignored, ophandle, output=None):
    """Fetch the results of a finished operation; parse JSON if the
    requested output format is json, otherwise return the raw body."""
    url = "/operations/" + ophandle
    url += "?t=status"
    if output:
        url += "&output=" + output
    # NOTE(review): GET/callback scaffolding restored after truncation.
    d = self.GET(url)
    def _got(res):
        if output and output.lower() == "json":
            return simplejson.loads(res)
        return res
    d.addCallback(_got)
    return d
def test_POST_DIRURL_deepcheck_no_ophandle(self):
    """t=start-deep-check without ophandle= must be a 400."""
    d = self.shouldFail2(error.Error,
                         "test_POST_DIRURL_deepcheck_no_ophandle",
                         "400 Bad Request",
                         "slow operation requires ophandle=",
                         self.POST, self.public_url, t="start-deep-check")
    return d
def test_POST_DIRURL_deepcheck(self):
    """Start a deep-check, poll the ophandle, and verify the JSON and
    HTML results pages, plus the per-SI detail pages."""
    def _check_redirect(statuscode, target):
        self.failUnlessReallyEqual(statuscode, str(http.FOUND))
        self.failUnless(target.endswith("/operations/123"))
    d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
                             self.POST, self.public_url,
                             t="start-deep-check", ophandle="123")
    d.addCallback(self.wait_for_operation, "123")
    def _check_json(data):
        self.failUnlessReallyEqual(data["finished"], True)
        self.failUnlessReallyEqual(data["count-objects-checked"], 8)
        self.failUnlessReallyEqual(data["count-objects-healthy"], 8)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "123", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)
        self.failUnless("Objects Healthy: <span>8</span>" in res)
    d.addCallback(_check_html)

    d.addCallback(lambda res:
                  self.GET("/operations/123/"))
    d.addCallback(_check_html) # should be the same as without the slash

    d.addCallback(lambda res:
                  self.shouldFail2(error.Error, "one", "404 Not Found",
                                   "No detailed results for SI bogus",
                                   self.GET, "/operations/123/bogus"))

    foo_si = self._foo_node.get_storage_index()
    foo_si_s = base32.b2a(foo_si)
    d.addCallback(lambda res:
                  self.GET("/operations/123/%s?output=JSON" % foo_si_s))
    def _check_foo_json(res):
        data = simplejson.loads(res)
        self.failUnlessEqual(data["storage-index"], foo_si_s)
        self.failUnless(data["results"]["healthy"])
    d.addCallback(_check_foo_json)
    return d
def test_POST_DIRURL_deepcheck_and_repair(self):
    """Deep-check-and-repair over a healthy tree: 8 objects checked,
    no repairs attempted, verified in both JSON and HTML."""
    d = self.POST(self.public_url, t="start-deep-check", repair="true",
                  ophandle="124", output="json", followRedirect=True)
    d.addCallback(self.wait_for_operation, "124")
    def _check_json(data):
        self.failUnlessReallyEqual(data["finished"], True)
        self.failUnlessReallyEqual(data["count-objects-checked"], 8)
        self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 8)
        self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
        self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
        self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
        self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
        self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
        self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 8)
        self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
        self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
    d.addCallback(_check_json)
    d.addCallback(self.get_operation_results, "124", "html")
    def _check_html(res):
        self.failUnless("Objects Checked: <span>8</span>" in res)

        self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)

        self.failUnless("Repairs Attempted: <span>0</span>" in res)
        self.failUnless("Repairs Successful: <span>0</span>" in res)
        self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)

        self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
        self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
        self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
    d.addCallback(_check_html)
    return d
def test_POST_FILEURL_bad_t(self):
    """An unrecognized t= on a file URL must be a 400."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to file: bad t=bogus",
                         self.POST, self.public_url + "/foo/bar.txt",
                         t="bogus")
    return d
def test_POST_mkdir(self): # return value?
    """POST t=mkdir with name= creates an empty child directory."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_initial_children(self):
    """t=mkdir-with-children populates the new directory from the
    JSON request body."""
    (newkids, caps) = self._create_initial_children()
    d = self.POST2(self.public_url +
                   "/foo?t=mkdir-with-children&name=newdir",
                   simplejson.dumps(newkids))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    return d
def test_POST_mkdir_immutable(self):
    """t=mkdir-immutable creates a deep-immutable directory holding
    the immutable children from the request body."""
    (newkids, caps) = self._create_immutable_children()
    d = self.POST2(self.public_url +
                   "/foo?t=mkdir-immutable&name=newdir",
                   simplejson.dumps(newkids))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
    return d
def test_POST_mkdir_immutable_bad(self):
    """t=mkdir-immutable with mutable children must be a 400."""
    (newkids, caps) = self._create_initial_children()
    d = self.shouldFail2(error.Error, "test_POST_mkdir_immutable_bad",
                         "400 Bad Request",
                         "needed to be immutable but was not",
                         self.POST2,
                         self.public_url +
                         "/foo?t=mkdir-immutable&name=newdir",
                         simplejson.dumps(newkids))
    return d
def test_POST_mkdir_2(self):
    """POST to DIRURL/newdir?t=mkdir creates the named subdirectory."""
    d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdirs_2(self):
    """t=mkdir through a missing intermediate creates both levels."""
    d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
    d.addCallback(lambda res: self._foo_node.get(u"bardir"))
    d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_no_parentdir_noredirect(self):
    """POST /uri?t=mkdir returns a parseable directory writecap."""
    d = self.POST("/uri?t=mkdir")
    def _after_mkdir(res):
        # raises if the response body is not a valid DIR2 writecap
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    return d
def test_POST_mkdir_no_parentdir_noredirect2(self):
    # make sure form-based arguments (as on the welcome page) still work
    d = self.POST("/uri", t="mkdir")
    def _after_mkdir(res):
        # raises if the response body is not a valid DIR2 writecap
        uri.DirectoryURI.init_from_string(res)
    d.addCallback(_after_mkdir)
    d.addErrback(self.explain_web_error)
    return d
def test_POST_mkdir_no_parentdir_redirect(self):
    """redirect_to_result=true gives a 303 pointing at the new dircap."""
    d = self.POST("/uri?t=mkdir&redirect_to_result=true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    return d
def test_POST_mkdir_no_parentdir_redirect2(self):
    """Same as redirect variant, but with form-encoded arguments."""
    d = self.POST("/uri", t="mkdir", redirect_to_result="true")
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    def _check_target(target):
        target = urllib.unquote(target)
        self.failUnless(target.startswith("uri/URI:DIR2:"), target)
    d.addCallback(_check_target)
    d.addErrback(self.explain_web_error)
    return d
def _make_readonly(self, u):
    """Return the read-only form of cap string *u*, or None if it has
    no distinct read-only form (e.g. literal caps)."""
    ro_uri = uri.from_string(u).get_readonly()
    # NOTE(review): None-guard restored after truncation -- verify.
    if ro_uri is None:
        return None
    return ro_uri.to_string()
def _create_initial_children(self):
    """Build a children-spec dict (JSON-encodable) covering immutable,
    mutable, unknown, and dirnode children, plus a dict of the caps used."""
    contents, n, filecap1 = self.makefile(12)
    md1 = {"metakey1": "metavalue1"}
    filecap2 = make_mutable_file_uri()
    node3 = self.s.create_node_from_uri(make_mutable_file_uri())
    filecap3 = node3.get_readonly_uri()
    node4 = self.s.create_node_from_uri(make_mutable_file_uri())
    dircap = DirectoryNode(node4, None, None).get_uri()
    litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
    emptydircap = "URI:DIR2-LIT:"
    newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
                                           "ro_uri": self._make_readonly(filecap1),
                                           "metadata": md1, }],
               u"child-mutable": ["filenode", {"rw_uri": filecap2,
                                               "ro_uri": self._make_readonly(filecap2)}],
               u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
               u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
                                                "ro_uri": unknown_rocap}],
               u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
               u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
               u"dirchild": ["dirnode", {"rw_uri": dircap,
                                         "ro_uri": self._make_readonly(dircap)}],
               u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
               u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
               }
    return newkids, {'filecap1': filecap1,
                     'filecap2': filecap2,
                     'filecap3': filecap3,
                     'unknown_rwcap': unknown_rwcap,
                     'unknown_rocap': unknown_rocap,
                     'unknown_immcap': unknown_immcap,
                     'dircap': dircap,
                     'litdircap': litdircap,
                     'emptydircap': emptydircap}
def _create_immutable_children(self):
    """Build a children-spec dict containing only immutable children,
    plus a dict of the caps used."""
    contents, n, filecap1 = self.makefile(12)
    md1 = {"metakey1": "metavalue1"}
    tnode = create_chk_filenode("immutable directory contents\n"*10)
    dnode = DirectoryNode(tnode, None, None)
    assert not dnode.is_mutable()
    immdircap = dnode.get_uri()
    litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
    emptydircap = "URI:DIR2-LIT:"
    newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
                                           "metadata": md1, }],
               u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
               u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
               u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
               u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
               }
    return newkids, {'filecap1': filecap1,
                     'unknown_immcap': unknown_immcap,
                     'immdircap': immdircap,
                     'litdircap': litdircap,
                     'emptydircap': emptydircap}
def test_POST_mkdir_no_parentdir_initial_children(self):
    """POST /uri?t=mkdir-with-children creates an unlinked directory
    containing every child from the request body."""
    (newkids, caps) = self._create_initial_children()
    d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
    def _after_mkdir(res):
        self.failUnless(res.startswith("URI:DIR"), res)
        n = self.s.create_node_from_uri(res)
        d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
        # NOTE(review): caps[...] closing lines restored after truncation.
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"child-imm",
                                                   caps['filecap1']))
        d2.addCallback(lambda ign:
                       self.failUnlessRWChildURIIs(n, u"child-mutable",
                                                   caps['filecap2']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"child-mutable-ro",
                                                   caps['filecap3']))
        d2.addCallback(lambda ign:
                       self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
                                                   caps['unknown_rwcap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"unknownchild-ro",
                                                   caps['unknown_rocap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"unknownchild-imm",
                                                   caps['unknown_immcap']))
        d2.addCallback(lambda ign:
                       self.failUnlessRWChildURIIs(n, u"dirchild",
                                                   caps['dircap']))
        return d2
    d.addCallback(_after_mkdir)
    return d
def test_POST_mkdir_no_parentdir_unexpected_children(self):
    # the regular /uri?t=mkdir operation is specified to ignore its body.
    # Only t=mkdir-with-children pays attention to it.
    (newkids, caps) = self._create_initial_children()
    d = self.shouldHTTPError("POST t=mkdir unexpected children",
                             400, "Bad Request",
                             "t=mkdir does not accept children=, "
                             "try t=mkdir-with-children instead",
                             self.POST2, "/uri?t=mkdir", # without children
                             simplejson.dumps(newkids))
    return d
def test_POST_noparent_bad(self):
    """An unknown t= on /uri must be a 400 listing the accepted forms."""
    d = self.shouldHTTPError("POST /uri?t=bogus", 400, "Bad Request",
                             "/uri accepts only PUT, PUT?t=mkdir, "
                             "POST?t=upload, and POST?t=mkdir",
                             self.POST, "/uri?t=bogus")
    return d
def test_POST_mkdir_no_parentdir_immutable(self):
    """POST /uri?t=mkdir-immutable creates an unlinked deep-immutable
    directory with the given immutable children."""
    (newkids, caps) = self._create_immutable_children()
    d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
    def _after_mkdir(res):
        self.failUnless(res.startswith("URI:DIR"), res)
        n = self.s.create_node_from_uri(res)
        d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
        # NOTE(review): caps[...] closing lines restored after truncation.
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"child-imm",
                                                   caps['filecap1']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"unknownchild-imm",
                                                   caps['unknown_immcap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"dirchild-imm",
                                                   caps['immdircap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"dirchild-lit",
                                                   caps['litdircap']))
        d2.addCallback(lambda ign:
                       self.failUnlessROChildURIIs(n, u"dirchild-empty",
                                                   caps['emptydircap']))
        return d2
    d.addCallback(_after_mkdir)
    return d
def test_POST_mkdir_no_parentdir_immutable_bad(self):
    """mkdir-immutable with mutable children on /uri must be a 400."""
    (newkids, caps) = self._create_initial_children()
    d = self.shouldFail2(error.Error,
                         "test_POST_mkdir_no_parentdir_immutable_bad",
                         "400 Bad Request",
                         "needed to be immutable but was not",
                         self.POST2,
                         "/uri?t=mkdir-immutable",
                         simplejson.dumps(newkids))
    return d
def test_welcome_page_mkdir_button(self):
    """Scrape the mkdir form off the welcome page, replay it, and
    expect a 303 redirect to the new directory."""
    # Fetch the welcome page.
    d = self.GET("/")  # restored: lost in truncation
    def _after_get_welcome_page(res):
        MKDIR_BUTTON_RE = re.compile(
            '<form action="([^"]*)" method="post".*?'
            '<input type="hidden" name="t" value="([^"]*)" />'
            '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
            '<input type="submit" value="Create a directory" />',
            re.I)  # NOTE(review): flag line restored -- verify upstream
        mo = MKDIR_BUTTON_RE.search(res)
        formaction = mo.group(1)
        formt = mo.group(2)  # restored: lost in truncation
        formaname = mo.group(3)
        formavalue = mo.group(4)
        return (formaction, formt, formaname, formavalue)
    d.addCallback(_after_get_welcome_page)
    def _after_parse_form(res):
        (formaction, formt, formaname, formavalue) = res
        return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
    d.addCallback(_after_parse_form)
    d.addBoth(self.shouldRedirect, None, statuscode='303')
    return d
def test_POST_mkdir_replace(self): # return value?
    """t=mkdir over an existing child replaces it with an empty dir."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_no_replace_queryarg(self): # return value?
    """t=mkdir with ?replace=false over an existing child must 409."""
    d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
    d.addBoth(self.shouldFail, error.Error,
              "POST_mkdir_no_replace_queryarg",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
def test_POST_mkdir_no_replace_field(self): # return value?
    """t=mkdir with replace=false form field over an existing child
    must 409."""
    d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
                  replace="false")
    d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self._foo_node.get(u"sub"))
    d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    return d
def test_POST_mkdir_whendone_field(self):
    """when_done= as a form field redirects after the mkdir."""
    d = self.POST(self.public_url + "/foo",
                  t="mkdir", name="newdir", when_done="/THERE")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_mkdir_whendone_queryarg(self):
    """when_done= as a query argument redirects after the mkdir."""
    d = self.POST(self.public_url + "/foo?when_done=/THERE",
                  t="mkdir", name="newdir")
    d.addBoth(self.shouldRedirect, "/THERE")
    d.addCallback(lambda res: self._foo_node.get(u"newdir"))
    d.addCallback(self.failUnlessNodeKeysAre, [])
    return d
def test_POST_bad_t(self):
    """An unrecognized t= on a directory URL must be a 400."""
    d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                         "POST to a directory with bad t=BOGUS",
                         self.POST, self.public_url + "/foo", t="BOGUS")
    return d
def test_POST_set_children(self, command_name="set_children"):
    """POST t=set_children atomically links three filecaps (with
    metadata) into the directory via a raw JSON request body."""
    contents9, n9, newuri9 = self.makefile(9)
    contents10, n10, newuri10 = self.makefile(10)
    contents11, n11, newuri11 = self.makefile(11)

    # NOTE(review): several lines of this JSON body and the callback
    # scaffolding were lost in truncation and have been reconstructed
    # from the visible fragments -- verify against upstream history.
    reqbody = """{
                 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
                                                   "metadata": {
                                                     "ctime": 1002777696.7564139,
                                                     "mtime": 1002777696.7564139
                                                     }
                                                  } ],
                 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
                                                   "metadata": {
                                                     "ctime": 1002777696.7564139,
                                                     "mtime": 1002777696.7564139
                                                     }
                                                  } ],
                 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
                                                   "metadata": {
                                                     "ctime": 1002777696.7564139,
                                                     "mtime": 1002777696.7564139
                                                     }
                                                  } ]
                 }""" % (newuri9, newuri10, newuri11)

    url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name

    d = client.getPage(url, method="POST", postdata=reqbody)
    def _then(res):
        self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
        self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
        self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
        return res
    d.addCallback(_then)
    d.addErrback(self.dump_error)
    return d
def test_POST_set_children_with_hyphen(self):
    """The hyphenated alias t=set-children must behave identically."""
    d = self.test_POST_set_children(command_name="set-children")
    return d
def test_POST_link_uri(self):
    """POST t=uri links an existing filecap under a new name."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
                                                  contents))
    return d
def test_POST_link_uri_replace(self):
    """POST t=uri over an existing name replaces the link."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
    d.addCallback(lambda res:
                  self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
                                                  contents))
    return d
def test_POST_link_uri_unknown_bad(self):
    """Linking an unknown rw-cap into a write slot must be a 400."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
    d.addBoth(self.shouldFail, error.Error,
              "POST_link_uri_unknown_bad",
              "400 Bad Request",
              "unknown cap in a write slot")
    return d
def test_POST_link_uri_unknown_ro_good(self):
    """An unknown ro.-prefixed cap may be linked read-only."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
    return d
def test_POST_link_uri_unknown_imm_good(self):
    """An unknown imm.-prefixed cap may be linked read-only."""
    d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
    d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
    return d
def test_POST_link_uri_no_replace_queryarg(self):
    """t=uri with ?replace=false over an existing name must 409."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo?replace=false", t="uri",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_link_uri_no_replace_queryarg",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_link_uri_no_replace_field(self):
    """t=uri with replace=false form field over an existing name must 409."""
    contents, n, newuri = self.makefile(8)
    d = self.POST(self.public_url + "/foo", t="uri", replace="false",
                  name="bar.txt", uri=newuri)
    d.addBoth(self.shouldFail, error.Error,
              "POST_link_uri_no_replace_field",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    return d
def test_POST_delete(self):
    """POST t=delete removes the named child."""
    d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
    d.addCallback(lambda res: self._foo_node.list())
    def _check(children):
        self.failIf(u"bar.txt" in children)
    d.addCallback(_check)
    return d
def test_POST_rename_file(self):
    """POST t=rename moves a child to a new name, preserving contents."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='wibble.txt')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    return d
def test_POST_rename_file_redundant(self):
    """Renaming a child to its own name is a harmless no-op."""
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='bar.txt')
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    return d
def test_POST_rename_file_replace(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addCallback(lambda res:
                  self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
    d.addCallback(lambda res:
                  self.failUnlessNodeHasChild(self._foo_node, u"empty"))
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
    d.addCallback(self.failUnlessIsBarDotTxt)
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsBarJSON)
    return d
def test_POST_rename_file_no_replace_queryarg(self):
    # rename a file and replace a directory with it
    d = self.POST(self.public_url + "/foo?replace=false", t="rename",
                  from_name="bar.txt", to_name='empty')
    d.addBoth(self.shouldFail, error.Error,
              "POST_rename_file_no_replace_queryarg",
              "409 Conflict",
              "There was already a child by that name, and you asked me "
              "to not replace it")
    d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
    d.addCallback(self.failUnlessIsEmptyJSON)
    return d
    def test_POST_rename_file_no_replace_field(self):
        # same refusal as the queryarg variant, but with replace=false sent
        # as a POST form field instead of a query argument
        d = self.POST(self.public_url + "/foo", t="rename", replace="false",
                      from_name="bar.txt", to_name='empty')
        d.addBoth(self.shouldFail, error.Error,
                  "POST_rename_file_no_replace_field",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
        d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
        d.addCallback(self.failUnlessIsEmptyJSON)
2705 def failUnlessIsEmptyJSON(self, res):
2706 data = simplejson.loads(res)
2707 self.failUnlessEqual(data[0], "dirnode", data)
2708 self.failUnlessReallyEqual(len(data[1]["children"]), 0)
    def test_POST_rename_file_slash_fail(self):
        # a to_name containing a slash is rejected outright; bar.txt must
        # still be present afterwards
        d = self.POST(self.public_url + "/foo", t="rename",
                      from_name="bar.txt", to_name='kirk/spock.txt')
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_rename_file_slash_fail",
                  "to_name= may not contain a slash",
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
    def test_POST_rename_dir(self):
        # directories can be renamed too: /foo becomes /plunk, and the
        # renamed node must still list foo's original children
        d = self.POST(self.public_url, t="rename",
                      from_name="foo", to_name='plunk')
        d.addCallback(lambda res:
                      self.failIfNodeHasChild(self.public_root, u"foo"))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self.public_root, u"plunk"))
        d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
        d.addCallback(self.failUnlessIsFooJSON)
2733 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2734 """ If target is not None then the redirection has to go to target. If
2735 statuscode is not None then the redirection has to be accomplished with
2736 that HTTP status code."""
2737 if not isinstance(res, failure.Failure):
2738 to_where = (target is None) and "somewhere" or ("to " + target)
2739 self.fail("%s: we were expecting to get redirected %s, not get an"
2740 " actual page: %s" % (which, to_where, res))
2741 res.trap(error.PageRedirect)
2742 if statuscode is not None:
2743 self.failUnlessReallyEqual(res.value.status, statuscode,
2744 "%s: not a redirect" % which)
2745 if target is not None:
2746 # the PageRedirect does not seem to capture the uri= query arg
2747 # properly, so we can't check for it.
2748 realtarget = self.webish_url + target
2749 self.failUnlessReallyEqual(res.value.location, realtarget,
2750 "%s: wrong target" % which)
2751 return res.value.location
    def test_GET_URI_form(self):
        base = "/uri?uri=%s" % self._bar_txt_uri
        # this is supposed to give us a redirect to /uri/$URI, plus arguments
        targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
        # extra query arguments (filename=, t=) must survive the redirect
        d.addBoth(self.shouldRedirect, targetbase)
        d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
        d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
        d.addCallback(lambda res: self.GET(base+"&t=json"))
        d.addBoth(self.shouldRedirect, targetbase+"?t=json")
        d.addCallback(self.log, "about to get file by uri")
        d.addCallback(lambda res: self.GET(base, followRedirect=True))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(self.log, "got file by uri, about to get dir by uri")
        d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
                                           followRedirect=True))
        d.addCallback(self.failUnlessIsFooJSON)
        d.addCallback(self.log, "got dir by uri")
    def test_GET_URI_form_bad(self):
        # /uri without a uri= query argument is a 400
        d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
                             "400 Bad Request", "GET /uri requires uri=",
    def test_GET_rename_form(self):
        # the rename form page must be pre-populated with the target child
        # name and a when_done return location
        d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
                     followRedirect=True)
            self.failUnless('name="when_done" value="."' in res, res)
            self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
        d.addCallback(_check)
    def log(self, res, msg):
        # Deferred-chain tracing hook; re-enable the print below to follow
        # test progress interactively.
        #print "MSG: %s RES: %s" % (msg, res)
    def test_GET_URI_URL(self):
        base = "/uri/%s" % self._bar_txt_uri
        # the filename= and save= query args must not change the bytes served
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
        d.addCallback(self.failUnlessIsBarDotTxt)
        d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
        d.addCallback(self.failUnlessIsBarDotTxt)
    def test_GET_URI_URL_dir(self):
        # fetching a directory cap with t=json yields the dirnode rendering
        base = "/uri/%s?t=json" % self._foo_uri
        d.addCallback(self.failUnlessIsFooJSON)
    def test_GET_URI_URL_missing(self):
        # an unrecoverable file cap must produce 410 GONE with a
        # NotEnoughSharesError explanation
        base = "/uri/%s" % self._bad_file_uri
        d = self.shouldHTTPError("test_GET_URI_URL_missing",
                                 http.GONE, None, "NotEnoughSharesError",
        # TODO: how can we exercise both sides of WebDownloadTarget.fail
        # here? we must arrange for a download to fail after target.open()
        # has been called, and then inspect the response to see that it is
        # shorter than we expected.
    def test_PUT_DIRURL_uri(self):
        # PUT ?t=uri onto an existing directory URL attaches the given cap
        # in place of the old child
        d = self.s.create_dirnode()
            new_uri = dn.get_uri()
            # replace /foo with a new (empty) directory
            d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
            # the response body echoes the cap that was attached
            d.addCallback(lambda res:
                          self.failUnlessReallyEqual(res.strip(), new_uri))
            d.addCallback(lambda res:
                          self.failUnlessRWChildURIIs(self.public_root,
        d.addCallback(_made_dir)
    def test_PUT_DIRURL_uri_noreplace(self):
        d = self.s.create_dirnode()
            new_uri = dn.get_uri()
            # replace /foo with a new (empty) directory, but ask that
            # replace=false, so it should fail
            d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
                                 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
                                 self.public_url + "/foo?t=uri&replace=false",
            # /foo must still carry its original cap
            d.addCallback(lambda res:
                          self.failUnlessRWChildURIIs(self.public_root,
        d.addCallback(_made_dir)
    def test_PUT_DIRURL_bad_t(self):
        # an unrecognized t= value on a directory PUT is a 400, and the
        # directory is left unchanged
        d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
                             "400 Bad Request", "PUT to a directory",
                             self.PUT, self.public_url + "/foo?t=BOGUS", "")
        d.addCallback(lambda res:
                      self.failUnlessRWChildURIIs(self.public_root,
    def test_PUT_NEWFILEURL_uri(self):
        # PUT ?t=uri on a fresh child name attaches an existing filecap
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
    def test_PUT_NEWFILEURL_uri_replace(self):
        # PUT ?t=uri on an existing child name replaces it by default
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
        d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
    def test_PUT_NEWFILEURL_uri_no_replace(self):
        # with replace=false, attaching onto an existing child name must fail
        contents, n, new_uri = self.makefile(8)
        d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
        d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
                  "There was already a child by that name, and you asked me "
                  "to not replace it")
    def test_PUT_NEWFILEURL_uri_unknown_bad(self):
        # a writecap in an unknown (future) format may not be placed in a
        # write slot, since we cannot diminish it to read-only
        d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
        d.addBoth(self.shouldFail, error.Error,
                  "POST_put_uri_unknown_bad",
                  "unknown cap in a write slot")
    def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
        # an unknown read-only cap is acceptable: it is stored as the child's
        # ro_uri
        d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
                      u"put-future-ro.txt")
    def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
        # an unknown immutable cap is likewise acceptable as a read-only child
        d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
        d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
                      u"put-future-imm.txt")
    def test_PUT_NEWFILE_URI(self):
        # PUT /uri uploads an unlinked immutable file and returns its cap
        file_contents = "New file contents here\n"
        d = self.PUT("/uri", file_contents)
            # NOTE: the local name 'uri' (the returned cap string) shadows
            # the module-level 'uri' import inside this callback
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessReallyEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_NEWFILE_URI_not_mutable(self):
        # mutable=false behaves the same as the default (immutable upload)
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=false", file_contents)
            assert isinstance(uri, str), uri
            self.failUnless(uri in FakeCHKFileNode.all_contents)
            self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
            return self.GET("/uri/%s" % uri)
        d.addCallback(_check)
            self.failUnlessReallyEqual(res, file_contents)
        d.addCallback(_check2)
    def test_PUT_NEWFILE_URI_only_PUT(self):
        # unsupported t= values on /uri are rejected with a helpful message
        d = self.PUT("/uri?t=bogus", "")
        d.addBoth(self.shouldFail, error.Error,
                  "PUT_NEWFILE_URI_only_PUT",
                  "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
    def test_PUT_NEWFILE_URI_mutable(self):
        # mutable=true creates an unlinked SSK mutable file; the returned
        # writecap must be usable both through the nodemaker and the webapi
        file_contents = "New file contents here\n"
        d = self.PUT("/uri?mutable=true", file_contents)
        def _check1(filecap):
            filecap = filecap.strip()
            self.failUnless(filecap.startswith("URI:SSK:"), filecap)
            self.filecap = filecap
            u = uri.WriteableSSKFileURI.init_from_string(filecap)
            self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(filecap)
            return n.download_best_version()
        d.addCallback(_check1)
            self.failUnlessReallyEqual(data, file_contents)
            return self.GET("/uri/%s" % urllib.quote(self.filecap))
        d.addCallback(_check2)
            self.failUnlessReallyEqual(res, file_contents)
        d.addCallback(_check3)
    def test_PUT_mkdir(self):
        # PUT /uri?t=mkdir creates an unlinked empty directory and returns
        # its cap; the cap must render as an empty dirnode via t=json
        d = self.PUT("/uri?t=mkdir", "")
            n = self.s.create_node_from_uri(uri.strip())
            d2 = self.failUnlessNodeKeysAre(n, [])
            d2.addCallback(lambda res:
                           self.GET("/uri/%s?t=json" % uri))
        d.addCallback(_check)
        d.addCallback(self.failUnlessIsEmptyJSON)
    def test_POST_check(self):
        # t=check on a directory child triggers a checker run
        d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
            # this returns a string form of the results, which are probably
            # None since we're using fake filenodes.
            # TODO: verify that the check actually happened, by changing
            # FakeCHKFileNode to count how many times .check() has been
        d.addCallback(_done)
    def test_bad_method(self):
        # an unrecognized HTTP method on a file URL gets 501 Not Implemented
        url = self.webish_url + self.public_url + "/foo/bar.txt"
        d = self.shouldHTTPError("test_bad_method",
                                 501, "Not Implemented",
                                 "I don't know how to treat a BOGUS request.",
                                 client.getPage, url, method="BOGUS")
    def test_short_url(self):
        # DELETE is not supported on the bare /uri resource
        url = self.webish_url + "/uri"
        d = self.shouldHTTPError("test_short_url", 501, "Not Implemented",
                                 "I don't know how to treat a DELETE request.",
                                 client.getPage, url, method="DELETE")
    def test_ophandle_bad(self):
        # querying a never-created operation handle is a 404
        url = self.webish_url + "/operations/bogus?t=status"
        d = self.shouldHTTPError("test_ophandle_bad", 404, "404 Not Found",
                                 "unknown/expired handle 'bogus'",
                                 client.getPage, url)
    def test_ophandle_cancel(self):
        # start a long-running manifest operation, cancel it, and confirm
        # the handle is forgotten afterwards
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/128?t=status&output=JSON"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
            monitor = self.ws.root.child_operations.handles["128"][0]
            d = self.POST("/operations/128?t=cancel&output=JSON")
                data = simplejson.loads(res)
                self.failUnless("finished" in data, res)
                # t=cancel causes the handle to be forgotten
                self.failUnless(monitor.is_cancelled())
            d.addCallback(_check2)
        d.addCallback(_check1)
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_cancel",
                                           404, "404 Not Found",
                                           "unknown/expired handle '128'",
                                           "/operations/128?t=status&output=JSON"))
    def test_ophandle_retainfor(self):
        # retain-for= controls how long a handle lives; re-fetching with
        # retain-for=0 schedules near-immediate expiry
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
                      followRedirect=True)
        d.addCallback(lambda ignored:
                      self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # the retain-for=0 will cause the handle to be expired very soon
        d.addCallback(lambda ign:
                      self.clock.advance(2.0))
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_retainfor",
                                           404, "404 Not Found",
                                           "unknown/expired handle '129'",
                                           "/operations/129?t=status&output=JSON"))
    def test_ophandle_release_after_complete(self):
        # release-after-complete=true drops the handle as soon as its status
        # has been collected once
        d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "130")
        d.addCallback(lambda ignored:
                      self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
        # the release-after-complete=true will cause the handle to be expired
        d.addCallback(lambda ignored:
                      self.shouldHTTPError("test_ophandle_release_after_complete",
                                           404, "404 Not Found",
                                           "unknown/expired handle '130'",
                                           "/operations/130?t=status&output=JSON"))
    def test_uncollected_ophandle_expiration(self):
        # uncollected ophandles should expire after 4 days
        def _make_uncollected_ophandle(ophandle):
            d = self.POST(self.public_url +
                          "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                          followRedirect=False)
            # When we start the operation, the webapi server will want
            # to redirect us to the page for the ophandle, so we get
            # confirmation that the operation has started. If the
            # manifest operation has finished by the time we get there,
            # following that redirect (by setting followRedirect=True
            # above) has the side effect of collecting the ophandle that
            # we've just created, which means that we can't use the
            # ophandle to test the uncollected timeout anymore. So,
            # instead, catch the 302 here and don't follow it.
            d.addBoth(self.should302, "uncollected_ophandle_creation")
        # Create an ophandle, don't collect it, then advance the clock by
        # 4 days - 1 second and make sure that the ophandle is still there.
        d = _make_uncollected_ophandle(131)
        d.addCallback(lambda ign:
                      self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
        d.addCallback(lambda ign:
                      self.GET("/operations/131?t=status&output=JSON"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # Create an ophandle, don't collect it, then try to collect it
        # after 4 days. It should be gone.
        d.addCallback(lambda ign:
                      _make_uncollected_ophandle(132))
        d.addCallback(lambda ign:
                      self.clock.advance(96*60*60))
        d.addCallback(lambda ign:
                      self.shouldHTTPError("test_uncollected_ophandle_expired_after_100_hours",
                                           404, "404 Not Found",
                                           "unknown/expired handle '132'",
                                           "/operations/132?t=status&output=JSON"))
    def test_collected_ophandle_expiration(self):
        # collected ophandles should expire after 1 day
        def _make_collected_ophandle(ophandle):
            d = self.POST(self.public_url +
                          "/foo/?t=start-manifest&ophandle=%d" % ophandle,
                          followRedirect=True)
            # By following the initial redirect, we collect the ophandle
            # we've just created.
        # Create a collected ophandle, then collect it after 23 hours
        # and 59 seconds to make sure that it is still there.
        d = _make_collected_ophandle(133)
        d.addCallback(lambda ign:
                      self.clock.advance((24*60*60) - 1))
        d.addCallback(lambda ign:
                      self.GET("/operations/133?t=status&output=JSON"))
            data = simplejson.loads(res)
            self.failUnless("finished" in data, res)
        d.addCallback(_check1)
        # Create another uncollected ophandle, then try to collect it
        # after 24 hours to make sure that it is gone.
        d.addCallback(lambda ign:
                      _make_collected_ophandle(134))
        d.addCallback(lambda ign:
                      self.clock.advance(24*60*60))
        d.addCallback(lambda ign:
                      self.shouldHTTPError("test_collected_ophandle_expired_after_1000_minutes",
                                           404, "404 Not Found",
                                           "unknown/expired handle '134'",
                                           "/operations/134?t=status&output=JSON"))
    def test_incident(self):
        # /report_incident accepts a details form field and acknowledges it
        d = self.POST("/report_incident", details="eek")
            self.failUnless("Thank you for your report!" in res, res)
        d.addCallback(_done)
    def test_static(self):
        # files placed under the node's static directory are served at
        # /static/...
        webdir = os.path.join(self.staticdir, "subdir")
        fileutil.make_dirs(webdir)
        f = open(os.path.join(webdir, "hello.txt"), "wb")
        d = self.GET("/static/subdir/hello.txt")
            self.failUnlessReallyEqual(res, "hello")
        d.addCallback(_check)
class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    """Unit tests for the small helper functions in allmydata.web.common
    and allmydata.web.status (formatting, argument parsing, pluralizing)."""

    def test_load_file(self):
        # This will raise an exception unless a well-formed XML file is found under that name.
        common.getxmlfile('directory.xhtml').load()

    def test_parse_replace_arg(self):
        self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
        self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
        self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
        # a misspelled value must be rejected, not silently coerced
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessReallyEqual(common.abbreviate_time(None), "")
        self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
        # negative values fall through to the smallest unit
        self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")

    def test_compute_rate(self):
        # rate is None whenever either input is missing or elapsed time is 0
        self.failUnlessReallyEqual(common.compute_rate(None, None), None)
        self.failUnlessReallyEqual(common.compute_rate(None, 1), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, None), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, 0), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, 10), 25000.0)
        self.failUnlessReallyEqual(common.compute_rate(0, 10), 0.0)
        # negative sizes and negative durations are programming errors
        self.shouldFail(AssertionError, "test_compute_rate", "",
                        common.compute_rate, -100, 10)
        self.shouldFail(AssertionError, "test_compute_rate", "",
                        common.compute_rate, 100, -10)
        rate = common.compute_rate(10*1000*1000, 1)
        self.failUnlessReallyEqual(common.abbreviate_rate(rate), "10.00MBps")

    def test_abbreviate_rate(self):
        self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
        self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessReallyEqual(common.abbreviate_size(None), "")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessReallyEqual(convert(0), "0 seconds")
        self.failUnlessReallyEqual(convert(1), "1 second")
        self.failUnlessReallyEqual(convert(2), "2 seconds")
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessReallyEqual(convert2([]), "has shares: ")
        self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
        self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):

    def CHECK(self, ign, which, args, clientnum=0):
        # Issue a checker POST against the stashed fileurl for *which*,
        # with *args* as the query string; *ign* lets this method sit
        # directly in a Deferred callback chain.
        fileurl = self.fileurls[which]
        url = fileurl + "?" + args
        return self.GET(url, method="POST", clientnum=clientnum)
    def test_filecheck(self):
        # Exercise t=check (HTML and JSON output) against five kinds of
        # files: healthy, sick (1 share gone), dead (9 of 10 shares gone),
        # corrupt (one share corrupted), and literal (always healthy).
        self.basedir = "web/Grid/filecheck"
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data("literal", convergence="")))
        d.addCallback(_stash_uri, "small")
        d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
        d.addCallback(_stash_mutable_uri, "smalldir")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            # damage the uploads: delete one share of "sick", nine shares of
            # "dead", and corrupt one share of "corrupt"
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_uri_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_uri_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_good)
        d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
        def _got_html_good_return_to(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        d.addCallback(_got_html_good_return_to)
        d.addCallback(self.CHECK, "good", "t=check&output=json")
        def _got_json_good(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"], "Healthy")
            self.failUnless(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_good)

        d.addCallback(self.CHECK, "small", "t=check")
        def _got_html_small(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_small)
        d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
        def _got_html_small_return_to(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless('<a href="somewhere">Return to file'
        d.addCallback(_got_html_small_return_to)
        d.addCallback(self.CHECK, "small", "t=check&output=json")
        def _got_json_small(res):
            # literal files have no storage index, so it renders empty
            r = simplejson.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_small)

        d.addCallback(self.CHECK, "smalldir", "t=check")
        def _got_html_smalldir(res):
            self.failUnless("Literal files are always healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
        d.addCallback(_got_html_smalldir)
        d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
        def _got_json_smalldir(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["storage-index"], "")
            self.failUnless(r["results"]["healthy"])
        d.addCallback(_got_json_smalldir)

        d.addCallback(self.CHECK, "sick", "t=check")
        def _got_html_sick(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_sick)
        d.addCallback(self.CHECK, "sick", "t=check&output=json")
        def _got_json_sick(res):
            # one share missing: unhealthy but still recoverable
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failUnless(r["results"]["recoverable"])
        d.addCallback(_got_json_sick)

        d.addCallback(self.CHECK, "dead", "t=check")
        def _got_html_dead(res):
            self.failUnless("Not Healthy" in res, res)
        d.addCallback(_got_html_dead)
        d.addCallback(self.CHECK, "dead", "t=check&output=json")
        def _got_json_dead(res):
            # only one share left (need 3): unrecoverable
            r = simplejson.loads(res)
            self.failUnlessEqual(r["summary"],
                                 "Not Healthy: 1 shares (enc 3-of-10)")
            self.failIf(r["results"]["healthy"])
            self.failIf(r["results"]["needs-rebalancing"])
            self.failIf(r["results"]["recoverable"])
        d.addCallback(_got_json_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
        def _got_html_corrupt(res):
            self.failUnless("Not Healthy! : Unhealthy" in res, res)
        d.addCallback(_got_html_corrupt)
        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
        def _got_json_corrupt(res):
            # verify=true is required to detect the corrupted share
            r = simplejson.loads(res)
            self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
            self.failIf(r["results"]["healthy"])
            self.failUnless(r["results"]["recoverable"])
            self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
            self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
        d.addCallback(_got_json_corrupt)

        d.addErrback(self.explain_web_error)
    def test_repair_html(self):
        # Exercise t=check&repair=true via the HTML interface against
        # healthy, sick, dead, and corrupt files.
        self.basedir = "web/Grid/repair_html"
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            # same damage pattern as test_filecheck: delete/corrupt shares
            good_shares = self.find_uri_shares(self.uris["good"])
            self.failUnlessReallyEqual(len(good_shares), 10)
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_uri_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_uri_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", "t=check&repair=true")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("No repair necessary" in res, res)
        d.addCallback(_got_html_good)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true")
        def _got_html_sick(res):
            self.failUnless("Healthy : healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_sick)

        # repair of a dead file will fail, of course, but it isn't yet
        # clear how this should be reported. Right now it shows up as
        #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
        #def _got_html_dead(res):
        #    self.failUnless("Healthy : healthy" in res, res)
        #    self.failIf("Not Healthy" in res, res)
        #    self.failUnless("No repair necessary" in res, res)
        #d.addCallback(_got_html_dead)

        d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
        def _got_html_corrupt(res):
            self.failUnless("Healthy : Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_corrupt)

        d.addErrback(self.explain_web_error)
    def test_repair_json(self):
        # t=check&repair=true&output=json must report both the pre-repair
        # (unhealthy) and post-repair (healthy) results
        self.basedir = "web/Grid/repair_json"
        c0 = self.g.clients[0]
        d = c0.upload(upload.Data(DATA+"1", convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "sick")

        def _compute_fileurls(ignored):
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            # make the file sick by removing one of its ten shares
            sick_shares = self.find_uri_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessReallyEqual(r["repair-attempted"], True)
            self.failUnlessReallyEqual(r["repair-successful"], True)
            self.failUnlessEqual(r["pre-repair-results"]["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["pre-repair-results"]["results"]["healthy"])
            self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
            self.failUnless(r["post-repair-results"]["results"]["healthy"])
        d.addCallback(_got_json_sick)

        d.addErrback(self.explain_web_error)
# Exercise webapi handling of UnknownNode children ("from the future" caps):
# the directory HTML listing, t=json, and t=info views must all tolerate caps
# the node does not understand, for both the mutable (rw/ro) and the
# deep-immutable variants. The `immutable` flag selects which variant to build.
# NOTE(review): this excerpt carries original line-number prefixes and the
# embedded numbering skips (e.g. 3509->3511), so parts of the if/else branches
# are missing here; code kept verbatim.
3508 def test_unknown(self, immutable=False):
3509 self.basedir = "web/Grid/unknown"
3511 self.basedir = "web/Grid/unknown-immutable"
3514 c0 = self.g.clients[0]
3518 # the future cap format may contain slashes, which must be tolerated
3519 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
# immutable variant: a deeply-immutable unknown child inside an immutable dirnode
3523 name = u"future-imm"
3524 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
3525 d = c0.create_immutable_dirnode({name: (future_node, {})})
# mutable variant: an unknown child carrying both a rw and a ro cap
3528 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3529 d = c0.create_dirnode()
3531 def _stash_root_and_create_file(n):
3533 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
3534 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
3536 return self.rootnode.set_node(name, future_node)
3537 d.addCallback(_stash_root_and_create_file)
3539 # make sure directory listing tolerates unknown nodes
3540 d.addCallback(lambda ign: self.GET(self.rooturl))
# helper: HTML listing must show "?<suffix>" as the child's type and a
# relative "More Info" link pointing at "<name>?t=info"
3541 def _check_directory_html(res, expected_type_suffix):
3542 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
3543 '<td>%s</td>' % (expected_type_suffix, str(name)),
3545 self.failUnless(re.search(pattern, res), res)
3546 # find the More Info link for name, should be relative
3547 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3548 info_url = mo.group(1)
3549 self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
3551 d.addCallback(_check_directory_html, "-IMM")
3553 d.addCallback(_check_directory_html, "")
3555 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
# helper: t=json directory listing must list the child as "unknown", expose
# the expected ro_uri, and expose rw_uri only when expect_rw_uri is set
3556 def _check_directory_json(res, expect_rw_uri):
3557 data = simplejson.loads(res)
3558 self.failUnlessEqual(data[0], "dirnode")
3559 f = data[1]["children"][name]
3560 self.failUnlessEqual(f[0], "unknown")
3562 self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
3564 self.failIfIn("rw_uri", f[1])
3566 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
3568 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
3569 self.failUnless("metadata" in f[1])
3570 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
# helper: t=info page must mention only the caps we expect, and must not
# offer raw-data links or checker/mutable/directory operation forms
3572 def _check_info(res, expect_rw_uri, expect_ro_uri):
3573 self.failUnlessIn("Object Type: <span>unknown</span>", res)
3575 self.failUnlessIn(unknown_rwcap, res)
3578 self.failUnlessIn(unknown_immcap, res)
3580 self.failUnlessIn(unknown_rocap, res)
3582 self.failIfIn(unknown_rocap, res)
3583 self.failIfIn("Raw data as", res)
3584 self.failIfIn("Directory writecap", res)
3585 self.failIfIn("Checker Operations", res)
3586 self.failIfIn("Mutable File Operations", res)
3587 self.failIfIn("Directory Operations", res)
3589 # FIXME: these should have expect_rw_uri=not immutable; I don't know
3590 # why they fail. Possibly related to ticket #922.
3592 d.addCallback(lambda ign: self.GET(expected_info_url))
3593 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
3594 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
3595 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
# helper: t=json on the unknown child itself; "mutable" flag behavior depends
# on which caps the child carries
3597 def _check_json(res, expect_rw_uri):
3598 data = simplejson.loads(res)
3599 self.failUnlessEqual(data[0], "unknown")
3601 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
3603 self.failIfIn("rw_uri", data[1])
3606 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
3607 self.failUnlessReallyEqual(data[1]["mutable"], False)
3609 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
3610 self.failUnlessReallyEqual(data[1]["mutable"], True)
3612 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
3613 self.failIf("mutable" in data[1], data[1])
3615 # TODO: check metadata contents
3616 self.failUnless("metadata" in data[1])
3618 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
3619 d.addCallback(_check_json, expect_rw_uri=not immutable)
3621 # and make sure that a read-only version of the directory can be
3622 # rendered too. This version will not have unknown_rwcap, whether
3623 # or not future_node was immutable.
3624 d.addCallback(lambda ign: self.GET(self.rourl))
3626 d.addCallback(_check_directory_html, "-IMM")
3628 d.addCallback(_check_directory_html, "-RO")
3630 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
3631 d.addCallback(_check_directory_json, expect_rw_uri=False)
3633 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
3634 d.addCallback(_check_json, expect_rw_uri=False)
3636 # TODO: check that getting t=info from the Info link in the ro directory
3637 # works, and does not include the writecap URI.
def test_immutable_unknown(self):
    """Run the test_unknown checks against the immutable-directory variant."""
    # Delegates to test_unknown, which branches on its `immutable` flag.
    return self.test_unknown(immutable=True)
# Verify that "mutant" children (entries whose ro_uri field holds a mutable
# SSK cap, which is not allowed inside an immutable directory) are silently
# dropped when the directory is read, while the valid LIT child survives —
# at both the dirnode layer and the webapi (HTML and t=json) layer.
# NOTE(review): excerpt has original line-number prefixes and missing lines
# (embedded numbering skips); code kept verbatim. `nm` is used before any
# visible assignment — presumably a NodeMaker obtained from the client on a
# dropped line; confirm against the full file.
3643 def test_mutant_dirnodes_are_omitted(self):
3644 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
3647 c = self.g.clients[0]
3652 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
3653 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
3654 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
3656 # This method tests mainly dirnode, but we'd have to duplicate code in order to
3657 # test the dirnode and web layers separately.
3659 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
3660 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
3661 # When the directory is read, the mutants should be silently disposed of, leaving
3662 # their lonely sibling.
3663 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
3664 # because immutable directories don't have a writecap and therefore that field
3665 # isn't (and can't be) decrypted.
3666 # TODO: The field still exists in the netstring. Technically we should check what
3667 # happens if something is put there (_unpack_contents should raise ValueError),
3668 # but that can wait.
3670 lonely_child = nm.create_from_cap(lonely_uri)
3671 mutant_ro_child = nm.create_from_cap(mut_read_uri)
3672 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
# monkey-patch the mutant children so create_immutable_dirnode will accept
# them despite their mutable caps
3674 def _by_hook_or_by_crook():
3676 for n in [mutant_ro_child, mutant_write_in_ro_child]:
3677 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
3679 mutant_write_in_ro_child.get_write_uri = lambda: None
3680 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
3682 kids = {u"lonely": (lonely_child, {}),
3683 u"ro": (mutant_ro_child, {}),
3684 u"write-in-ro": (mutant_write_in_ro_child, {}),
3686 d = c.create_immutable_dirnode(kids)
3689 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
3690 self.failIf(dn.is_mutable())
3691 self.failUnless(dn.is_readonly())
3692 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
3693 self.failIf(hasattr(dn._node, 'get_writekey'))
3695 self.failUnless("RO-IMM" in rep)
3697 self.failUnlessIn("CHK", cap.to_string())
3700 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
3701 return download_to_data(dn._node)
3702 d.addCallback(_created)
# Inspect the raw netstring-encoded directory contents: all three children
# (including the mutants) must still be physically present in the ciphertext.
3704 def _check_data(data):
3705 # Decode the netstring representation of the directory to check that all children
3706 # are present. This is a bit of an abstraction violation, but there's not really
3707 # any other way to do it given that the real DirectoryNode._unpack_contents would
3708 # strip the mutant children out (which is what we're trying to test, later).
3711 while position < len(data):
3712 entries, position = split_netstring(data, 1, position)
3714 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
3715 name = name_utf8.decode("utf-8")
3716 self.failUnless(rwcapdata == "")
3717 self.failUnless(name in kids)
3718 (expected_child, ign) = kids[name]
3719 self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
3722 self.failUnlessReallyEqual(numkids, 3)
3723 return self.rootnode.list()
3724 d.addCallback(_check_data)
3726 # Now when we use the real directory listing code, the mutants should be absent.
3727 def _check_kids(children):
3728 self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
3729 lonely_node, lonely_metadata = children[u"lonely"]
3731 self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
3732 self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
3733 d.addCallback(_check_kids)
3735 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
3736 d.addCallback(lambda n: n.list())
3737 d.addCallback(_check_kids) # again with dirnode recreated from cap
3739 # Make sure the lonely child can be listed in HTML...
3740 d.addCallback(lambda ign: self.GET(self.rooturl))
3741 def _check_html(res):
3742 self.failIfIn("URI:SSK", res)
3743 get_lonely = "".join([r'<td>FILE</td>',
3745 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
3747 r'\s+<td align="right">%d</td>' % len("one"),
3749 self.failUnless(re.search(get_lonely, res), res)
3751 # find the More Info link for name, should be relative
3752 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
3753 info_url = mo.group(1)
3754 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
3755 d.addCallback(_check_html)
# ...and in JSON, where only the lonely child appears, with no rw_uri.
3758 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
3759 def _check_json(res):
3760 data = simplejson.loads(res)
3761 self.failUnlessEqual(data[0], "dirnode")
3762 listed_children = data[1]["children"]
3763 self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
3764 ll_type, ll_data = listed_children[u"lonely"]
3765 self.failUnlessEqual(ll_type, "filenode")
3766 self.failIf("rw_uri" in ll_data)
3767 self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
3768 d.addCallback(_check_json)
# Streaming deep-check / stream-manifest webapi test: builds a root directory
# with good/small/sick children plus an UnknownNode, deletes some shares of
# "sick", and verifies the streamed JSON units and stats. Then makes a
# subdirectory unrecoverable and checks that the stream reports the failure
# via an "ERROR:" line followed by a traceback.
# NOTE(review): excerpt carries original line-number prefixes and the
# numbering skips, so some statements (e.g. the `_done`/`u0`/`stats`
# assignments) are missing from this view; code kept verbatim.
3771 def test_deep_check(self):
3772 self.basedir = "web/Grid/deep_check"
3774 c0 = self.g.clients[0]
3778 d = c0.create_dirnode()
3779 def _stash_root_and_create_file(n):
3781 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3782 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3783 d.addCallback(_stash_root_and_create_file)
3784 def _stash_uri(fn, which):
3785 self.uris[which] = fn.get_uri()
3787 d.addCallback(_stash_uri, "good")
3788 d.addCallback(lambda ign:
3789 self.rootnode.add_file(u"small",
3790 upload.Data("literal",
3792 d.addCallback(_stash_uri, "small")
3793 d.addCallback(lambda ign:
3794 self.rootnode.add_file(u"sick",
3795 upload.Data(DATA+"1",
3797 d.addCallback(_stash_uri, "sick")
3799 # this tests that deep-check and stream-manifest will ignore
3800 # UnknownNode instances. Hopefully this will also cover deep-stats.
3801 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
3802 d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
# degrade "sick" by deleting two of its shares (still recoverable)
3804 def _clobber_shares(ignored):
3805 self.delete_shares_numbered(self.uris["sick"], [0,1])
3806 d.addCallback(_clobber_shares)
3814 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# each streamed line is a JSON unit; root first, then children, then stats
3817 units = [simplejson.loads(line)
3818 for line in res.splitlines()
3821 print "response is:", res
3822 print "undecodeable line was '%s'" % line
3824 self.failUnlessReallyEqual(len(units), 5+1)
3825 # should be parent-first
3827 self.failUnlessEqual(u0["path"], [])
3828 self.failUnlessEqual(u0["type"], "directory")
3829 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
3830 u0cr = u0["check-results"]
3831 self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
3833 ugood = [u for u in units
3834 if u["type"] == "file" and u["path"] == [u"good"]][0]
3835 self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
3836 ugoodcr = ugood["check-results"]
3837 self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
3840 self.failUnlessEqual(stats["type"], "stats")
3842 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
3843 self.failUnlessReallyEqual(s["count-literal-files"], 1)
3844 self.failUnlessReallyEqual(s["count-directories"], 1)
3845 self.failUnlessReallyEqual(s["count-unknown"], 1)
3846 d.addCallback(_done)
3848 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3849 def _check_manifest(res):
3850 self.failUnless(res.endswith("\n"))
3851 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
3852 self.failUnlessReallyEqual(len(units), 5+1)
3853 self.failUnlessEqual(units[-1]["type"], "stats")
3855 self.failUnlessEqual(first["path"], [])
3856 self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
3857 self.failUnlessEqual(first["type"], "directory")
3858 stats = units[-1]["stats"]
3859 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
3860 self.failUnlessReallyEqual(stats["count-literal-files"], 1)
3861 self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
3862 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
3863 self.failUnlessReallyEqual(stats["count-unknown"], 1)
3864 d.addCallback(_check_manifest)
3866 # now add root/subdir and root/subdir/grandchild, then make subdir
3867 # unrecoverable, then see what happens
3869 d.addCallback(lambda ign:
3870 self.rootnode.create_subdirectory(u"subdir"))
3871 d.addCallback(_stash_uri, "subdir")
3872 d.addCallback(lambda subdir_node:
3873 subdir_node.add_file(u"grandchild",
3874 upload.Data(DATA+"2",
3876 d.addCallback(_stash_uri, "grandchild")
3878 d.addCallback(lambda ign:
3879 self.delete_shares_numbered(self.uris["subdir"],
3887 # root/subdir [unrecoverable]
3888 # root/subdir/grandchild
3890 # how should a streaming-JSON API indicate fatal error?
3891 # answer: emit ERROR: instead of a JSON string
3893 d.addCallback(self.CHECK, "root", "t=stream-manifest")
3894 def _check_broken_manifest(res):
3895 lines = res.splitlines()
3897 for (i,line) in enumerate(lines)
3898 if line.startswith("ERROR:")]
3900 self.fail("no ERROR: in output: %s" % (res,))
3901 first_error = error_lines[0]
3902 error_line = lines[first_error]
3903 error_msg = lines[first_error+1:]
3904 error_msg_s = "\n".join(error_msg) + "\n"
3905 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3907 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3908 units = [simplejson.loads(line) for line in lines[:first_error]]
3909 self.failUnlessReallyEqual(len(units), 6) # includes subdir
3910 last_unit = units[-1]
3911 self.failUnlessEqual(last_unit["path"], ["subdir"])
3912 d.addCallback(_check_broken_manifest)
# same breakage, now through stream-deep-check: valid units before the
# ERROR: line, with the unrecoverable subdir reported last
3914 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
3915 def _check_broken_deepcheck(res):
3916 lines = res.splitlines()
3918 for (i,line) in enumerate(lines)
3919 if line.startswith("ERROR:")]
3921 self.fail("no ERROR: in output: %s" % (res,))
3922 first_error = error_lines[0]
3923 error_line = lines[first_error]
3924 error_msg = lines[first_error+1:]
3925 error_msg_s = "\n".join(error_msg) + "\n"
3926 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
3928 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
3929 units = [simplejson.loads(line) for line in lines[:first_error]]
3930 self.failUnlessReallyEqual(len(units), 6) # includes subdir
3931 last_unit = units[-1]
3932 self.failUnlessEqual(last_unit["path"], ["subdir"])
3933 r = last_unit["check-results"]["results"]
3934 self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
3935 self.failUnlessReallyEqual(r["count-shares-good"], 1)
3936 self.failUnlessReallyEqual(r["recoverable"], False)
3937 d.addCallback(_check_broken_deepcheck)
3939 d.addErrback(self.explain_web_error)
# Streaming deep-check with repair=true: build good/small/sick children,
# delete one share of "sick", then verify that the stream reports
# repair-attempted=False for healthy objects and a successful 9->10 share
# repair for "sick". Dead/corrupt cases are left commented out.
# NOTE(review): excerpt carries original line-number prefixes and missing
# lines (numbering skips); code kept verbatim.
3942 def test_deep_check_and_repair(self):
3943 self.basedir = "web/Grid/deep_check_and_repair"
3945 c0 = self.g.clients[0]
3949 d = c0.create_dirnode()
3950 def _stash_root_and_create_file(n):
3952 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3953 return n.add_file(u"good", upload.Data(DATA, convergence=""))
3954 d.addCallback(_stash_root_and_create_file)
3955 def _stash_uri(fn, which):
3956 self.uris[which] = fn.get_uri()
3957 d.addCallback(_stash_uri, "good")
3958 d.addCallback(lambda ign:
3959 self.rootnode.add_file(u"small",
3960 upload.Data("literal",
3962 d.addCallback(_stash_uri, "small")
3963 d.addCallback(lambda ign:
3964 self.rootnode.add_file(u"sick",
3965 upload.Data(DATA+"1",
3967 d.addCallback(_stash_uri, "sick")
3968 #d.addCallback(lambda ign:
3969 # self.rootnode.add_file(u"dead",
3970 # upload.Data(DATA+"2",
3972 #d.addCallback(_stash_uri, "dead")
3974 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3975 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
3976 #d.addCallback(_stash_uri, "corrupt")
# remove exactly one share of "sick" so the repairer has work to do
3978 def _clobber_shares(ignored):
3979 good_shares = self.find_uri_shares(self.uris["good"])
3980 self.failUnlessReallyEqual(len(good_shares), 10)
3981 sick_shares = self.find_uri_shares(self.uris["sick"])
3982 os.unlink(sick_shares[0][2])
3983 #dead_shares = self.find_uri_shares(self.uris["dead"])
3984 #for i in range(1, 10):
3985 # os.unlink(dead_shares[i][2])
3987 #c_shares = self.find_uri_shares(self.uris["corrupt"])
3988 #cso = CorruptShareOptions()
3989 #cso.stdout = StringIO()
3990 #cso.parseOptions([c_shares[0][2]])
3992 d.addCallback(_clobber_shares)
3995 # root/good CHK, 10 shares
3997 # root/sick CHK, 9 shares
3999 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
4001 units = [simplejson.loads(line)
4002 for line in res.splitlines()
4004 self.failUnlessReallyEqual(len(units), 4+1)
4005 # should be parent-first
4007 self.failUnlessEqual(u0["path"], [])
4008 self.failUnlessEqual(u0["type"], "directory")
4009 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
4010 u0crr = u0["check-and-repair-results"]
4011 self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
4012 self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
4014 ugood = [u for u in units
4015 if u["type"] == "file" and u["path"] == [u"good"]][0]
4016 self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
4017 ugoodcrr = ugood["check-and-repair-results"]
4018 self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
4019 self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
# "sick" must have been repaired: 9 good shares before, 10 after
4021 usick = [u for u in units
4022 if u["type"] == "file" and u["path"] == [u"sick"]][0]
4023 self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
4024 usickcrr = usick["check-and-repair-results"]
4025 self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
4026 self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
4027 self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
4028 self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
4031 self.failUnlessEqual(stats["type"], "stats")
4033 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
4034 self.failUnlessReallyEqual(s["count-literal-files"], 1)
4035 self.failUnlessReallyEqual(s["count-directories"], 1)
4036 d.addCallback(_done)
4038 d.addErrback(self.explain_web_error)
# Deferred-chain helper: count leases on every share file of self.uris[which]
# and collect (share-filename, lease-count) pairs for _assert_leasecount.
# `ignored` is the previous callback's result, discarded.
# NOTE(review): `lease_counts` is appended to without a visible initialization
# and there is no visible return — the embedded numbering skips 4044 and
# 4049-4050, so those lines appear to be missing from this excerpt.
4041 def _count_leases(self, ignored, which):
4042 u = self.uris[which]
4043 shares = self.find_uri_shares(u)
4045 for shnum, serverid, fn in shares:
4046 sf = get_share_file(fn)
4047 num_leases = len(list(sf.get_leases()))
4048 lease_counts.append( (fn, num_leases) )
4051 def _assert_leasecount(self, lease_counts, expected):
4052 for (fn, num_leases) in lease_counts:
4053 if num_leases != expected:
4054 self.fail("expected %d leases, have %d, on %s" %
4055 (expected, num_leases, fn))
# Verify webapi t=check lease behavior: a plain check adds no lease, a check
# with add-lease=true from the original client merely renews its existing
# lease (count stays 1), and the same request from a second client (different
# lease secrets) adds a second lease — for both immutable and mutable files.
# NOTE(review): excerpt carries original line-number prefixes and missing
# lines (numbering skips); code kept verbatim.
4057 def test_add_lease(self):
4058 self.basedir = "web/Grid/add_lease"
4059 self.set_up_grid(num_clients=2)
4060 c0 = self.g.clients[0]
4063 d = c0.upload(upload.Data(DATA, convergence=""))
4064 def _stash_uri(ur, which):
4065 self.uris[which] = ur.uri
4066 d.addCallback(_stash_uri, "one")
4067 d.addCallback(lambda ign:
4068 c0.upload(upload.Data(DATA+"1", convergence="")))
4069 d.addCallback(_stash_uri, "two")
4070 def _stash_mutable_uri(n, which):
4071 self.uris[which] = n.get_uri()
4072 assert isinstance(self.uris[which], str)
4073 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
4074 d.addCallback(_stash_mutable_uri, "mutable")
4076 def _compute_fileurls(ignored):
4078 for which in self.uris:
4079 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4080 d.addCallback(_compute_fileurls)
# baseline: one lease each after upload
4082 d.addCallback(self._count_leases, "one")
4083 d.addCallback(self._assert_leasecount, 1)
4084 d.addCallback(self._count_leases, "two")
4085 d.addCallback(self._assert_leasecount, 1)
4086 d.addCallback(self._count_leases, "mutable")
4087 d.addCallback(self._assert_leasecount, 1)
4089 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
4090 def _got_html_good(res):
4091 self.failUnless("Healthy" in res, res)
4092 self.failIf("Not Healthy" in res, res)
4093 d.addCallback(_got_html_good)
# plain t=check must not change any lease counts
4095 d.addCallback(self._count_leases, "one")
4096 d.addCallback(self._assert_leasecount, 1)
4097 d.addCallback(self._count_leases, "two")
4098 d.addCallback(self._assert_leasecount, 1)
4099 d.addCallback(self._count_leases, "mutable")
4100 d.addCallback(self._assert_leasecount, 1)
4102 # this CHECK uses the original client, which uses the same
4103 # lease-secrets, so it will just renew the original lease
4104 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
4105 d.addCallback(_got_html_good)
4107 d.addCallback(self._count_leases, "one")
4108 d.addCallback(self._assert_leasecount, 1)
4109 d.addCallback(self._count_leases, "two")
4110 d.addCallback(self._assert_leasecount, 1)
4111 d.addCallback(self._count_leases, "mutable")
4112 d.addCallback(self._assert_leasecount, 1)
4114 # this CHECK uses an alternate client, which adds a second lease
4115 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
4116 d.addCallback(_got_html_good)
4118 d.addCallback(self._count_leases, "one")
4119 d.addCallback(self._assert_leasecount, 2)
4120 d.addCallback(self._count_leases, "two")
4121 d.addCallback(self._assert_leasecount, 1)
4122 d.addCallback(self._count_leases, "mutable")
4123 d.addCallback(self._assert_leasecount, 1)
4125 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
4126 d.addCallback(_got_html_good)
4128 d.addCallback(self._count_leases, "one")
4129 d.addCallback(self._assert_leasecount, 2)
4130 d.addCallback(self._count_leases, "two")
4131 d.addCallback(self._assert_leasecount, 1)
4132 d.addCallback(self._count_leases, "mutable")
4133 d.addCallback(self._assert_leasecount, 1)
# same add-lease on the mutable file from the alternate client
# (clientnum argument is on a dropped line per the numbering gap at 4136)
4135 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
4137 d.addCallback(_got_html_good)
4139 d.addCallback(self._count_leases, "one")
4140 d.addCallback(self._assert_leasecount, 2)
4141 d.addCallback(self._count_leases, "two")
4142 d.addCallback(self._assert_leasecount, 1)
4143 d.addCallback(self._count_leases, "mutable")
4144 d.addCallback(self._assert_leasecount, 2)
4146 d.addErrback(self.explain_web_error)
# Deep-check variant of the add-lease test: t=stream-deep-check with
# add-lease=true from the original client leaves lease counts at 1 (renewal),
# while the same request from a second client bumps every node (root, child
# file, mutable) to 2 leases.
# NOTE(review): excerpt carries original line-number prefixes and missing
# lines (numbering skips); code kept verbatim.
4149 def test_deep_add_lease(self):
4150 self.basedir = "web/Grid/deep_add_lease"
4151 self.set_up_grid(num_clients=2)
4152 c0 = self.g.clients[0]
4156 d = c0.create_dirnode()
4157 def _stash_root_and_create_file(n):
4159 self.uris["root"] = n.get_uri()
4160 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4161 return n.add_file(u"one", upload.Data(DATA, convergence=""))
4162 d.addCallback(_stash_root_and_create_file)
4163 def _stash_uri(fn, which):
4164 self.uris[which] = fn.get_uri()
4165 d.addCallback(_stash_uri, "one")
4166 d.addCallback(lambda ign:
4167 self.rootnode.add_file(u"small",
4168 upload.Data("literal",
4170 d.addCallback(_stash_uri, "small")
4172 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
4173 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
4174 d.addCallback(_stash_uri, "mutable")
4176 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
4178 units = [simplejson.loads(line)
4179 for line in res.splitlines()
4181 # root, one, small, mutable, stats
4182 self.failUnlessReallyEqual(len(units), 4+1)
4183 d.addCallback(_done)
# baseline: one lease each
4185 d.addCallback(self._count_leases, "root")
4186 d.addCallback(self._assert_leasecount, 1)
4187 d.addCallback(self._count_leases, "one")
4188 d.addCallback(self._assert_leasecount, 1)
4189 d.addCallback(self._count_leases, "mutable")
4190 d.addCallback(self._assert_leasecount, 1)
# add-lease from the original client only renews the existing leases
4192 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
4193 d.addCallback(_done)
4195 d.addCallback(self._count_leases, "root")
4196 d.addCallback(self._assert_leasecount, 1)
4197 d.addCallback(self._count_leases, "one")
4198 d.addCallback(self._assert_leasecount, 1)
4199 d.addCallback(self._count_leases, "mutable")
4200 d.addCallback(self._assert_leasecount, 1)
# add-lease from the second client adds a new lease everywhere
# (clientnum argument is on a dropped line per the numbering gap at 4203)
4202 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
4204 d.addCallback(_done)
4206 d.addCallback(self._count_leases, "root")
4207 d.addCallback(self._assert_leasecount, 2)
4208 d.addCallback(self._count_leases, "one")
4209 d.addCallback(self._assert_leasecount, 2)
4210 d.addCallback(self._count_leases, "mutable")
4211 d.addCallback(self._assert_leasecount, 2)
4213 d.addErrback(self.explain_web_error)
# Verify webapi error reporting: 410 Gone with a text/plain explanation for
# NoSharesError / NotEnoughSharesError / UnrecoverableFileError, 404 for a
# missing child, degraded-but-rendered HTML for unrecoverable directories,
# and 500 responses (HTML vs plain traceback, chosen by the Accept header)
# for an internal server error raised by an ErrorBoom resource.
# NOTE(review): excerpt carries original line-number prefixes and missing
# lines (numbering skips); code kept verbatim.
4217 def test_exceptions(self):
4218 self.basedir = "web/Grid/exceptions"
4219 self.set_up_grid(num_clients=1, num_servers=2)
4220 c0 = self.g.clients[0]
4221 c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
4224 d = c0.create_dirnode()
4226 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4227 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
4229 d.addCallback(_stash_root)
4230 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
# "1share": keep only share 0; "0shares": flip a key bit to make a valid-
# looking URI whose shares can never be found
4232 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
4233 self.delete_shares_numbered(ur.uri, range(1,10))
4235 u = uri.from_string(ur.uri)
4236 u.key = testutil.flip_bit(u.key, 0)
4237 baduri = u.to_string()
4238 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
4239 d.addCallback(_stash_bad)
4240 d.addCallback(lambda ign: c0.create_dirnode())
4241 def _mangle_dirnode_1share(n):
4243 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
4244 self.fileurls["dir-1share-json"] = url + "?t=json"
4245 self.delete_shares_numbered(u, range(1,10))
4246 d.addCallback(_mangle_dirnode_1share)
4247 d.addCallback(lambda ign: c0.create_dirnode())
4248 def _mangle_dirnode_0share(n):
4250 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
4251 self.fileurls["dir-0share-json"] = url + "?t=json"
4252 self.delete_shares_numbered(u, range(0,10))
4253 d.addCallback(_mangle_dirnode_0share)
4255 # NotEnoughSharesError should be reported sensibly, with a
4256 # text/plain explanation of the problem, and perhaps some
4257 # information on which shares *could* be found.
4259 d.addCallback(lambda ignored:
4260 self.shouldHTTPError("GET unrecoverable",
4261 410, "Gone", "NoSharesError",
4262 self.GET, self.fileurls["0shares"]))
4263 def _check_zero_shares(body):
4264 self.failIf("<html>" in body, body)
4265 body = " ".join(body.strip().split())
4266 exp = ("NoSharesError: no shares could be found. "
4267 "Zero shares usually indicates a corrupt URI, or that "
4268 "no servers were connected, but it might also indicate "
4269 "severe corruption. You should perform a filecheck on "
4270 "this object to learn more. The full error message is: "
4271 "no shares (need 3). Last failure: None")
4272 self.failUnlessReallyEqual(exp, body)
4273 d.addCallback(_check_zero_shares)
4276 d.addCallback(lambda ignored:
4277 self.shouldHTTPError("GET 1share",
4278 410, "Gone", "NotEnoughSharesError",
4279 self.GET, self.fileurls["1share"]))
# the exact message depends on downloader timing, so accept either form
4280 def _check_one_share(body):
4281 self.failIf("<html>" in body, body)
4282 body = " ".join(body.strip().split())
4283 msgbase = ("NotEnoughSharesError: This indicates that some "
4284 "servers were unavailable, or that shares have been "
4285 "lost to server departure, hard drive failure, or disk "
4286 "corruption. You should perform a filecheck on "
4287 "this object to learn more. The full error message is:"
4289 msg1 = msgbase + (" ran out of shares:"
4292 " overdue= unused= need 3. Last failure: None")
4293 msg2 = msgbase + (" ran out of shares:"
4295 " pending=Share(sh0-on-xgru5)"
4296 " overdue= unused= need 3. Last failure: None")
4297 self.failUnless(body == msg1 or body == msg2, body)
4298 d.addCallback(_check_one_share)
4300 d.addCallback(lambda ignored:
4301 self.shouldHTTPError("GET imaginary",
4302 404, "Not Found", None,
4303 self.GET, self.fileurls["imaginary"]))
4304 def _missing_child(body):
4305 self.failUnless("No such child: imaginary" in body, body)
4306 d.addCallback(_missing_child)
# an unrecoverable directory still renders an HTML page, with an error
# explanation in place of the child table and upload forms
4308 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
4309 def _check_0shares_dir_html(body):
4310 self.failUnless("<html>" in body, body)
4311 # we should see the regular page, but without the child table or
4313 body = " ".join(body.strip().split())
4314 self.failUnlessIn('href="?t=info">More info on this directory',
4316 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4317 "could not be retrieved, because there were insufficient "
4318 "good shares. This might indicate that no servers were "
4319 "connected, insufficient servers were connected, the URI "
4320 "was corrupt, or that shares have been lost due to server "
4321 "departure, hard drive failure, or disk corruption. You "
4322 "should perform a filecheck on this object to learn more.")
4323 self.failUnlessIn(exp, body)
4324 self.failUnlessIn("No upload forms: directory is unreadable", body)
4325 d.addCallback(_check_0shares_dir_html)
4327 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
4328 def _check_1shares_dir_html(body):
4329 # at some point, we'll split UnrecoverableFileError into 0-shares
4330 # and some-shares like we did for immutable files (since there
4331 # are different sorts of advice to offer in each case). For now,
4332 # they present the same way.
4333 self.failUnless("<html>" in body, body)
4334 body = " ".join(body.strip().split())
4335 self.failUnlessIn('href="?t=info">More info on this directory',
4337 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4338 "could not be retrieved, because there were insufficient "
4339 "good shares. This might indicate that no servers were "
4340 "connected, insufficient servers were connected, the URI "
4341 "was corrupt, or that shares have been lost due to server "
4342 "departure, hard drive failure, or disk corruption. You "
4343 "should perform a filecheck on this object to learn more.")
4344 self.failUnlessIn(exp, body)
4345 self.failUnlessIn("No upload forms: directory is unreadable", body)
4346 d.addCallback(_check_1shares_dir_html)
# the t=json view of the same directories returns 410 with plain text
4348 d.addCallback(lambda ignored:
4349 self.shouldHTTPError("GET dir-0share-json",
4350 410, "Gone", "UnrecoverableFileError",
4352 self.fileurls["dir-0share-json"]))
4353 def _check_unrecoverable_file(body):
4354 self.failIf("<html>" in body, body)
4355 body = " ".join(body.strip().split())
4356 exp = ("UnrecoverableFileError: the directory (or mutable file) "
4357 "could not be retrieved, because there were insufficient "
4358 "good shares. This might indicate that no servers were "
4359 "connected, insufficient servers were connected, the URI "
4360 "was corrupt, or that shares have been lost due to server "
4361 "departure, hard drive failure, or disk corruption. You "
4362 "should perform a filecheck on this object to learn more.")
4363 self.failUnlessReallyEqual(exp, body)
4364 d.addCallback(_check_unrecoverable_file)
4366 d.addCallback(lambda ignored:
4367 self.shouldHTTPError("GET dir-1share-json",
4368 410, "Gone", "UnrecoverableFileError",
4370 self.fileurls["dir-1share-json"]))
4371 d.addCallback(_check_unrecoverable_file)
4373 d.addCallback(lambda ignored:
4374 self.shouldHTTPError("GET imaginary",
4375 404, "Not Found", None,
4376 self.GET, self.fileurls["imaginary"]))
4378 # attach a webapi child that throws a random error, to test how it
4380 w = c0.getServiceNamed("webish")
4381 w.root.putChild("ERRORBOOM", ErrorBoom())
4383 # "Accept: */*" : should get a text/html stack trace
4384 # "Accept: text/plain" : should get a text/plain stack trace
4385 # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
4386 # no Accept header: should get a text/html stack trace
4388 d.addCallback(lambda ignored:
4389 self.shouldHTTPError("GET errorboom_html",
4390 500, "Internal Server Error", None,
4391 self.GET, "ERRORBOOM",
4392 headers={"accept": ["*/*"]}))
4393 def _internal_error_html1(body):
4394 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4395 d.addCallback(_internal_error_html1)
4397 d.addCallback(lambda ignored:
4398 self.shouldHTTPError("GET errorboom_text",
4399 500, "Internal Server Error", None,
4400 self.GET, "ERRORBOOM",
4401 headers={"accept": ["text/plain"]}))
4402 def _internal_error_text2(body):
4403 self.failIf("<html>" in body, body)
4404 self.failUnless(body.startswith("Traceback "), body)
4405 d.addCallback(_internal_error_text2)
4407 CLI_accepts = "text/plain, application/octet-stream"
4408 d.addCallback(lambda ignored:
4409 self.shouldHTTPError("GET errorboom_text",
4410 500, "Internal Server Error", None,
4411 self.GET, "ERRORBOOM",
4412 headers={"accept": [CLI_accepts]}))
4413 def _internal_error_text3(body):
4414 self.failIf("<html>" in body, body)
4415 self.failUnless(body.startswith("Traceback "), body)
4416 d.addCallback(_internal_error_text3)
4418 d.addCallback(lambda ignored:
4419 self.shouldHTTPError("GET errorboom_text",
4420 500, "Internal Server Error", None,
4421 self.GET, "ERRORBOOM"))
4422 def _internal_error_html4(body):
4423 self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
4424 d.addCallback(_internal_error_html4)
4426 def _flush_errors(res):
4427 # Trial: please ignore the CompletelyUnhandledError in the logs
4428 self.flushLoggedErrors(CompletelyUnhandledError)
4430 d.addBoth(_flush_errors)
class CompletelyUnhandledError(Exception):
    """Deliberately-unhandled exception raised by ErrorBoom.beforeRender to
    exercise the webapi's 500 Internal Server Error rendering.

    Defect fixed: in this excerpt the class statement has no body (the
    original body line was dropped), which is a syntax error; the docstring
    restores a valid, empty exception class.
    """
# Nevow page that always fails: raising from beforeRender makes every request
# to this resource produce an internal server error, which test_exceptions
# mounts at "ERRORBOOM" to check 500-response rendering.
4436 class ErrorBoom(rend.Page):
4437 def beforeRender(self, ctx):
4438 raise CompletelyUnhandledError("whoops")