1 import os.path, re, urllib, time
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.internet.task import Clock
8 from twisted.web import client, error, http
9 from twisted.python import failure, log
10 from nevow import rend
11 from allmydata import interfaces, uri, webish, dirnode
12 from allmydata.storage.shares import get_share_file
13 from allmydata.storage_client import StorageFarmBroker
14 from allmydata.immutable import upload
15 from allmydata.immutable.downloader.status import DownloadStatus
16 from allmydata.dirnode import DirectoryNode
17 from allmydata.nodemaker import NodeMaker
18 from allmydata.unknown import UnknownNode
19 from allmydata.web import status, common
20 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
21 from allmydata.util import fileutil, base32, hashutil
22 from allmydata.util.consumer import download_to_data
23 from allmydata.util.netstring import split_netstring
24 from allmydata.util.encodingutil import to_str
25 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
26 create_chk_filenode, WebErrorMixin, ShouldFailMixin, \
27 make_mutable_file_uri, create_mutable_filenode
28 from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
36 # create a fake uploader/downloader, and a couple of fake dirnodes, then
37 # create a webserver that works against them
39 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
41 unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
42 unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
43 unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
45 class FakeStatsProvider:
47 stats = {'stats': {}, 'counters': {}}
50 class FakeNodeMaker(NodeMaker):
55 'max_segment_size':128*1024 # 1024=KiB
57 def _create_lit(self, cap):
58 return FakeCHKFileNode(cap)
59 def _create_immutable(self, cap):
60 return FakeCHKFileNode(cap)
61 def _create_mutable(self, cap):
62 return FakeMutableFileNode(None,
64 self.encoding_params, None).init_from_cap(cap)
65 def create_mutable_file(self, contents="", keysize=None,
66 version=SDMF_VERSION):
67 n = FakeMutableFileNode(None, None, self.encoding_params, None)
68 return n.create(contents, version=version)
70 class FakeUploader(service.Service):
72 def upload(self, uploadable):
73 d = uploadable.get_size()
74 d.addCallback(lambda size: uploadable.read(size))
77 n = create_chk_filenode(data)
78 results = upload.UploadResults()
79 results.uri = n.get_uri()
81 d.addCallback(_got_data)
83 def get_helper_info(self):
    def __init__(self, binaryserverid):
        # binaryserverid: binary server identifier (callers pass a 20-byte
        # tagged hash); handed back verbatim by get_serverid()
        self.binaryserverid = binaryserverid
    def get_name(self): return "short"  # fixed fake short name for display
    def get_longname(self): return "long"  # fixed fake long name for display
    def get_serverid(self): return self.binaryserverid  # binary id from __init__
94 ds = DownloadStatus("storage_index", 1234)
97 serverA = FakeIServer(hashutil.tagged_hash("foo", "serverid_a")[:20])
98 serverB = FakeIServer(hashutil.tagged_hash("foo", "serverid_b")[:20])
99 storage_index = hashutil.storage_index_hash("SI")
100 e0 = ds.add_segment_request(0, now)
102 e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime
103 e1 = ds.add_segment_request(1, now+2)
105 # two outstanding requests
106 e2 = ds.add_segment_request(2, now+4)
107 e3 = ds.add_segment_request(3, now+5)
108 del e2,e3 # hush pyflakes
110 # simulate a segment which gets delivered faster than a system clock tick (ticket #1166)
111 e = ds.add_segment_request(4, now)
113 e.deliver(now, 0, 140, 0.5)
115 e = ds.add_dyhb_request(serverA, now)
116 e.finished([1,2], now+1)
117 e = ds.add_dyhb_request(serverB, now+2) # left unfinished
119 e = ds.add_read_event(0, 120, now)
120 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
122 e = ds.add_read_event(120, 30, now+2) # left unfinished
124 e = ds.add_block_request(serverA, 1, 100, 20, now)
125 e.finished(20, now+1)
126 e = ds.add_block_request(serverB, 1, 120, 30, now+1) # left unfinished
128 # make sure that add_read_event() can come first too
129 ds1 = DownloadStatus(storage_index, 1234)
130 e = ds1.add_read_event(0, 120, now)
131 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
137 _all_upload_status = [upload.UploadStatus()]
138 _all_download_status = [build_one_ds()]
139 _all_mapupdate_statuses = [servermap.UpdateStatus()]
140 _all_publish_statuses = [publish.PublishStatus()]
141 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
    def list_all_upload_statuses(self):
        # canned single-element list built at class-definition time
        return self._all_upload_status
    def list_all_download_statuses(self):
        # canned single-element list built at class-definition time
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        # canned single-element list built at class-definition time
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        # canned single-element list built at class-definition time
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        # canned single-element list built at class-definition time
        return self._all_retrieve_statuses
153 def list_all_helper_statuses(self):
156 class FakeClient(Client):
158 # don't upcall to Client.__init__, since we only want to initialize a
160 service.MultiService.__init__(self)
161 self.nodeid = "fake_nodeid"
162 self.nickname = "fake_nickname"
163 self.introducer_furl = "None"
164 self.stats_provider = FakeStatsProvider()
165 self._secret_holder = SecretHolder("lease secret", "convergence secret")
167 self.convergence = "some random string"
168 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
169 self.introducer_client = None
170 self.history = FakeHistory()
171 self.uploader = FakeUploader()
172 self.uploader.setServiceParent(self)
173 self.blacklist = None
174 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
177 self.mutable_file_default = SDMF_VERSION
    def startService(self):
        # skip Client.startService (which would touch real storage/introducer);
        # only start the plain MultiService machinery
        return service.MultiService.startService(self)
    def stopService(self):
        # mirror startService: bypass Client.stopService entirely
        return service.MultiService.stopService(self)
184 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
186 class WebMixin(object):
188 self.s = FakeClient()
189 self.s.startService()
190 self.staticdir = self.mktemp()
192 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
194 self.ws.setServiceParent(self.s)
195 self.webish_port = self.ws.getPortnum()
196 self.webish_url = self.ws.getURL()
197 assert self.webish_url.endswith("/")
198 self.webish_url = self.webish_url[:-1] # these tests add their own /
200 l = [ self.s.create_dirnode() for x in range(6) ]
201 d = defer.DeferredList(l)
203 self.public_root = res[0][1]
204 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
205 self.public_url = "/uri/" + self.public_root.get_uri()
206 self.private_root = res[1][1]
210 self._foo_uri = foo.get_uri()
211 self._foo_readonly_uri = foo.get_readonly_uri()
212 self._foo_verifycap = foo.get_verify_cap().to_string()
213 # NOTE: we ignore the deferred on all set_uri() calls, because we
214 # know the fake nodes do these synchronously
215 self.public_root.set_uri(u"foo", foo.get_uri(),
216 foo.get_readonly_uri())
218 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
219 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
220 self._bar_txt_verifycap = n.get_verify_cap().to_string()
223 # XXX: Do we ever use this?
224 self.BAZ_CONTENTS, n, self._baz_txt_uri, self._baz_txt_readonly_uri = self.makefile_mutable(0)
226 foo.set_uri(u"baz.txt", self._baz_txt_uri, self._baz_txt_readonly_uri)
229 self.QUUX_CONTENTS, n, self._quux_txt_uri, self._quux_txt_readonly_uri = self.makefile_mutable(0, mdmf=True)
230 assert self._quux_txt_uri.startswith("URI:MDMF")
231 foo.set_uri(u"quux.txt", self._quux_txt_uri, self._quux_txt_readonly_uri)
233 foo.set_uri(u"empty", res[3][1].get_uri(),
234 res[3][1].get_readonly_uri())
235 sub_uri = res[4][1].get_uri()
236 self._sub_uri = sub_uri
237 foo.set_uri(u"sub", sub_uri, sub_uri)
238 sub = self.s.create_node_from_uri(sub_uri)
240 _ign, n, blocking_uri = self.makefile(1)
241 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
243 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
244 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
245 # still think of it as an umlaut
246 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
248 _ign, n, baz_file = self.makefile(2)
249 self._baz_file_uri = baz_file
250 sub.set_uri(u"baz.txt", baz_file, baz_file)
252 _ign, n, self._bad_file_uri = self.makefile(3)
253 # this uri should not be downloadable
254 del FakeCHKFileNode.all_contents[self._bad_file_uri]
257 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
258 rodir.get_readonly_uri())
259 rodir.set_uri(u"nor", baz_file, baz_file)
265 # public/foo/quux.txt
266 # public/foo/blockingfile
269 # public/foo/sub/baz.txt
271 # public/reedownlee/nor
272 self.NEWFILE_CONTENTS = "newfile contents\n"
274 return foo.get_metadata_for(u"bar.txt")
276 def _got_metadata(metadata):
277 self._bar_txt_metadata = metadata
278 d.addCallback(_got_metadata)
281 def makefile(self, number):
282 contents = "contents of file %s\n" % number
283 n = create_chk_filenode(contents)
284 return contents, n, n.get_uri()
286 def makefile_mutable(self, number, mdmf=False):
287 contents = "contents of mutable file %s\n" % number
288 n = create_mutable_filenode(contents, mdmf)
289 return contents, n, n.get_uri(), n.get_readonly_uri()
292 return self.s.stopService()
    def failUnlessIsBarDotTxt(self, res):
        # res must be exactly the contents of public/foo/bar.txt
        self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
    def failUnlessIsQuuxDotTxt(self, res):
        # res must be exactly the contents of the MDMF quux.txt fixture
        self.failUnlessReallyEqual(res, self.QUUX_CONTENTS, res)
    def failUnlessIsBazDotTxt(self, res):
        # res must be exactly the contents of the SDMF baz.txt fixture
        self.failUnlessReallyEqual(res, self.BAZ_CONTENTS, res)
303 def failUnlessIsBarJSON(self, res):
304 data = simplejson.loads(res)
305 self.failUnless(isinstance(data, list))
306 self.failUnlessEqual(data[0], "filenode")
307 self.failUnless(isinstance(data[1], dict))
308 self.failIf(data[1]["mutable"])
309 self.failIf("rw_uri" in data[1]) # immutable
310 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
311 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
312 self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))
314 def failUnlessIsQuuxJSON(self, res, readonly=False):
315 data = simplejson.loads(res)
316 self.failUnless(isinstance(data, list))
317 self.failUnlessEqual(data[0], "filenode")
318 self.failUnless(isinstance(data[1], dict))
320 return self.failUnlessIsQuuxDotTxtMetadata(metadata, readonly)
322 def failUnlessIsQuuxDotTxtMetadata(self, metadata, readonly):
323 self.failUnless(metadata['mutable'])
325 self.failIf("rw_uri" in metadata)
327 self.failUnless("rw_uri" in metadata)
328 self.failUnlessEqual(metadata['rw_uri'], self._quux_txt_uri)
329 self.failUnless("ro_uri" in metadata)
330 self.failUnlessEqual(metadata['ro_uri'], self._quux_txt_readonly_uri)
331 self.failUnlessReallyEqual(metadata['size'], len(self.QUUX_CONTENTS))
333 def failUnlessIsFooJSON(self, res):
334 data = simplejson.loads(res)
335 self.failUnless(isinstance(data, list))
336 self.failUnlessEqual(data[0], "dirnode", res)
337 self.failUnless(isinstance(data[1], dict))
338 self.failUnless(data[1]["mutable"])
339 self.failUnless("rw_uri" in data[1]) # mutable
340 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
341 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
342 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
344 kidnames = sorted([unicode(n) for n in data[1]["children"]])
345 self.failUnlessEqual(kidnames,
346 [u"bar.txt", u"baz.txt", u"blockingfile",
347 u"empty", u"n\u00fc.txt", u"quux.txt", u"sub"])
348 kids = dict( [(unicode(name),value)
350 in data[1]["children"].iteritems()] )
351 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
352 self.failUnlessIn("metadata", kids[u"sub"][1])
353 self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
354 tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
355 self.failUnlessIn("linkcrtime", tahoe_md)
356 self.failUnlessIn("linkmotime", tahoe_md)
357 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
358 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
359 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
360 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
361 self._bar_txt_verifycap)
362 self.failUnlessIn("metadata", kids[u"bar.txt"][1])
363 self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
364 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
365 self._bar_txt_metadata["tahoe"]["linkcrtime"])
366 self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
368 self.failUnlessIn("quux.txt", kids)
369 self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["rw_uri"]),
371 self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["ro_uri"]),
372 self._quux_txt_readonly_uri)
374 def GET(self, urlpath, followRedirect=False, return_response=False,
376 # if return_response=True, this fires with (data, statuscode,
377 # respheaders) instead of just data.
378 assert not isinstance(urlpath, unicode)
379 url = self.webish_url + urlpath
380 factory = HTTPClientGETFactory(url, method="GET",
381 followRedirect=followRedirect, **kwargs)
382 reactor.connectTCP("localhost", self.webish_port, factory)
385 return (data, factory.status, factory.response_headers)
387 d.addCallback(_got_data)
388 return factory.deferred
390 def HEAD(self, urlpath, return_response=False, **kwargs):
391 # this requires some surgery, because twisted.web.client doesn't want
392 # to give us back the response headers.
393 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
394 reactor.connectTCP("localhost", self.webish_port, factory)
397 return (data, factory.status, factory.response_headers)
399 d.addCallback(_got_data)
400 return factory.deferred
402 def PUT(self, urlpath, data, **kwargs):
403 url = self.webish_url + urlpath
404 return client.getPage(url, method="PUT", postdata=data, **kwargs)
406 def DELETE(self, urlpath):
407 url = self.webish_url + urlpath
408 return client.getPage(url, method="DELETE")
410 def POST(self, urlpath, followRedirect=False, **fields):
411 sepbase = "boogabooga"
415 form.append('Content-Disposition: form-data; name="_charset"')
419 for name, value in fields.iteritems():
420 if isinstance(value, tuple):
421 filename, value = value
422 form.append('Content-Disposition: form-data; name="%s"; '
423 'filename="%s"' % (name, filename.encode("utf-8")))
425 form.append('Content-Disposition: form-data; name="%s"' % name)
427 if isinstance(value, unicode):
428 value = value.encode("utf-8")
431 assert isinstance(value, str)
438 body = "\r\n".join(form) + "\r\n"
439 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
440 return self.POST2(urlpath, body, headers, followRedirect)
442 def POST2(self, urlpath, body="", headers={}, followRedirect=False):
443 url = self.webish_url + urlpath
444 return client.getPage(url, method="POST", postdata=body,
445 headers=headers, followRedirect=followRedirect)
447 def shouldFail(self, res, expected_failure, which,
448 substring=None, response_substring=None):
449 if isinstance(res, failure.Failure):
450 res.trap(expected_failure)
452 self.failUnless(substring in str(res),
453 "substring '%s' not in '%s'"
454 % (substring, str(res)))
455 if response_substring:
456 self.failUnless(response_substring in res.value.response,
457 "response substring '%s' not in '%s'"
458 % (response_substring, res.value.response))
460 self.fail("%s was supposed to raise %s, not get '%s'" %
461 (which, expected_failure, res))
463 def shouldFail2(self, expected_failure, which, substring,
465 callable, *args, **kwargs):
466 assert substring is None or isinstance(substring, str)
467 assert response_substring is None or isinstance(response_substring, str)
468 d = defer.maybeDeferred(callable, *args, **kwargs)
470 if isinstance(res, failure.Failure):
471 res.trap(expected_failure)
473 self.failUnless(substring in str(res),
474 "%s: substring '%s' not in '%s'"
475 % (which, substring, str(res)))
476 if response_substring:
477 self.failUnless(response_substring in res.value.response,
478 "%s: response substring '%s' not in '%s'"
480 response_substring, res.value.response))
482 self.fail("%s was supposed to raise %s, not get '%s'" %
483 (which, expected_failure, res))
487 def should404(self, res, which):
488 if isinstance(res, failure.Failure):
489 res.trap(error.Error)
490 self.failUnlessReallyEqual(res.value.status, "404")
492 self.fail("%s was supposed to Error(404), not get '%s'" %
495 def should302(self, res, which):
496 if isinstance(res, failure.Failure):
497 res.trap(error.Error)
498 self.failUnlessReallyEqual(res.value.status, "302")
500 self.fail("%s was supposed to Error(302), not get '%s'" %
504 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
505 def test_create(self):
508 def test_welcome(self):
511 self.failUnless('Welcome To Tahoe-LAFS' in res, res)
513 self.s.basedir = 'web/test_welcome'
514 fileutil.make_dirs("web/test_welcome")
515 fileutil.make_dirs("web/test_welcome/private")
517 d.addCallback(_check)
520 def test_provisioning(self):
521 d = self.GET("/provisioning/")
523 self.failUnless('Provisioning Tool' in res)
524 fields = {'filled': True,
525 "num_users": int(50e3),
526 "files_per_user": 1000,
527 "space_per_user": int(1e9),
528 "sharing_ratio": 1.0,
529 "encoding_parameters": "3-of-10-5",
531 "ownership_mode": "A",
532 "download_rate": 100,
537 return self.POST("/provisioning/", **fields)
539 d.addCallback(_check)
541 self.failUnless('Provisioning Tool' in res)
542 self.failUnless("Share space consumed: 167.01TB" in res)
544 fields = {'filled': True,
545 "num_users": int(50e6),
546 "files_per_user": 1000,
547 "space_per_user": int(5e9),
548 "sharing_ratio": 1.0,
549 "encoding_parameters": "25-of-100-50",
550 "num_servers": 30000,
551 "ownership_mode": "E",
552 "drive_failure_model": "U",
554 "download_rate": 1000,
559 return self.POST("/provisioning/", **fields)
560 d.addCallback(_check2)
562 self.failUnless("Share space consumed: huge!" in res)
563 fields = {'filled': True}
564 return self.POST("/provisioning/", **fields)
565 d.addCallback(_check3)
567 self.failUnless("Share space consumed:" in res)
568 d.addCallback(_check4)
571 def test_reliability_tool(self):
573 from allmydata import reliability
574 _hush_pyflakes = reliability
577 raise unittest.SkipTest("reliability tool requires NumPy")
579 d = self.GET("/reliability/")
581 self.failUnless('Reliability Tool' in res)
582 fields = {'drive_lifetime': "8Y",
587 "check_period": "1M",
588 "report_period": "3M",
591 return self.POST("/reliability/", **fields)
593 d.addCallback(_check)
595 self.failUnless('Reliability Tool' in res)
596 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
597 self.failUnless(re.search(r, res), res)
598 d.addCallback(_check2)
601 def test_status(self):
602 h = self.s.get_history()
603 dl_num = h.list_all_download_statuses()[0].get_counter()
604 ul_num = h.list_all_upload_statuses()[0].get_counter()
605 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
606 pub_num = h.list_all_publish_statuses()[0].get_counter()
607 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
608 d = self.GET("/status", followRedirect=True)
610 self.failUnless('Upload and Download Status' in res, res)
611 self.failUnless('"down-%d"' % dl_num in res, res)
612 self.failUnless('"up-%d"' % ul_num in res, res)
613 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
614 self.failUnless('"publish-%d"' % pub_num in res, res)
615 self.failUnless('"retrieve-%d"' % ret_num in res, res)
616 d.addCallback(_check)
617 d.addCallback(lambda res: self.GET("/status/?t=json"))
618 def _check_json(res):
619 data = simplejson.loads(res)
620 self.failUnless(isinstance(data, dict))
621 #active = data["active"]
622 # TODO: test more. We need a way to fake an active operation
624 d.addCallback(_check_json)
626 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
628 self.failUnless("File Download Status" in res, res)
629 d.addCallback(_check_dl)
630 d.addCallback(lambda res: self.GET("/status/down-%d/event_json" % dl_num))
631 def _check_dl_json(res):
632 data = simplejson.loads(res)
633 self.failUnless(isinstance(data, dict))
634 self.failUnless("read" in data)
635 self.failUnlessEqual(data["read"][0]["length"], 120)
636 self.failUnlessEqual(data["segment"][0]["segment_length"], 100)
637 self.failUnlessEqual(data["segment"][2]["segment_number"], 2)
638 self.failUnlessEqual(data["segment"][2]["finish_time"], None)
639 phwr_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_a")[:20])
640 cmpu_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_b")[:20])
641 # serverids[] keys are strings, since that's what JSON does, but
642 # we'd really like them to be ints
643 self.failUnlessEqual(data["serverids"]["0"], "phwr")
644 self.failUnless(data["serverids"].has_key("1"), data["serverids"])
645 self.failUnlessEqual(data["serverids"]["1"], "cmpu", data["serverids"])
646 self.failUnlessEqual(data["server_info"][phwr_id]["short"], "phwr")
647 self.failUnlessEqual(data["server_info"][cmpu_id]["short"], "cmpu")
648 self.failUnless("dyhb" in data)
649 d.addCallback(_check_dl_json)
650 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
652 self.failUnless("File Upload Status" in res, res)
653 d.addCallback(_check_ul)
654 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
655 def _check_mapupdate(res):
656 self.failUnless("Mutable File Servermap Update Status" in res, res)
657 d.addCallback(_check_mapupdate)
658 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
659 def _check_publish(res):
660 self.failUnless("Mutable File Publish Status" in res, res)
661 d.addCallback(_check_publish)
662 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
663 def _check_retrieve(res):
664 self.failUnless("Mutable File Retrieve Status" in res, res)
665 d.addCallback(_check_retrieve)
669 def test_status_numbers(self):
670 drrm = status.DownloadResultsRendererMixin()
671 self.failUnlessReallyEqual(drrm.render_time(None, None), "")
672 self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
673 self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
674 self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
675 self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
676 self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
677 self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
678 self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
679 self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
681 urrm = status.UploadResultsRendererMixin()
682 self.failUnlessReallyEqual(urrm.render_time(None, None), "")
683 self.failUnlessReallyEqual(urrm.render_time(None, 2.5), "2.50s")
684 self.failUnlessReallyEqual(urrm.render_time(None, 0.25), "250ms")
685 self.failUnlessReallyEqual(urrm.render_time(None, 0.0021), "2.1ms")
686 self.failUnlessReallyEqual(urrm.render_time(None, 0.000123), "123us")
687 self.failUnlessReallyEqual(urrm.render_rate(None, None), "")
688 self.failUnlessReallyEqual(urrm.render_rate(None, 2500000), "2.50MBps")
689 self.failUnlessReallyEqual(urrm.render_rate(None, 30100), "30.1kBps")
690 self.failUnlessReallyEqual(urrm.render_rate(None, 123), "123Bps")
692 def test_GET_FILEURL(self):
693 d = self.GET(self.public_url + "/foo/bar.txt")
694 d.addCallback(self.failUnlessIsBarDotTxt)
697 def test_GET_FILEURL_range(self):
698 headers = {"range": "bytes=1-10"}
699 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
700 return_response=True)
701 def _got((res, status, headers)):
702 self.failUnlessReallyEqual(int(status), 206)
703 self.failUnless(headers.has_key("content-range"))
704 self.failUnlessReallyEqual(headers["content-range"][0],
705 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
706 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
710 def test_GET_FILEURL_partial_range(self):
711 headers = {"range": "bytes=5-"}
712 length = len(self.BAR_CONTENTS)
713 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
714 return_response=True)
715 def _got((res, status, headers)):
716 self.failUnlessReallyEqual(int(status), 206)
717 self.failUnless(headers.has_key("content-range"))
718 self.failUnlessReallyEqual(headers["content-range"][0],
719 "bytes 5-%d/%d" % (length-1, length))
720 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
724 def test_GET_FILEURL_partial_end_range(self):
725 headers = {"range": "bytes=-5"}
726 length = len(self.BAR_CONTENTS)
727 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
728 return_response=True)
729 def _got((res, status, headers)):
730 self.failUnlessReallyEqual(int(status), 206)
731 self.failUnless(headers.has_key("content-range"))
732 self.failUnlessReallyEqual(headers["content-range"][0],
733 "bytes %d-%d/%d" % (length-5, length-1, length))
734 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
738 def test_GET_FILEURL_partial_range_overrun(self):
739 headers = {"range": "bytes=100-200"}
740 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
741 "416 Requested Range not satisfiable",
742 "First beyond end of file",
743 self.GET, self.public_url + "/foo/bar.txt",
747 def test_HEAD_FILEURL_range(self):
748 headers = {"range": "bytes=1-10"}
749 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
750 return_response=True)
751 def _got((res, status, headers)):
752 self.failUnlessReallyEqual(res, "")
753 self.failUnlessReallyEqual(int(status), 206)
754 self.failUnless(headers.has_key("content-range"))
755 self.failUnlessReallyEqual(headers["content-range"][0],
756 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
760 def test_HEAD_FILEURL_partial_range(self):
761 headers = {"range": "bytes=5-"}
762 length = len(self.BAR_CONTENTS)
763 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
764 return_response=True)
765 def _got((res, status, headers)):
766 self.failUnlessReallyEqual(int(status), 206)
767 self.failUnless(headers.has_key("content-range"))
768 self.failUnlessReallyEqual(headers["content-range"][0],
769 "bytes 5-%d/%d" % (length-1, length))
773 def test_HEAD_FILEURL_partial_end_range(self):
774 headers = {"range": "bytes=-5"}
775 length = len(self.BAR_CONTENTS)
776 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
777 return_response=True)
778 def _got((res, status, headers)):
779 self.failUnlessReallyEqual(int(status), 206)
780 self.failUnless(headers.has_key("content-range"))
781 self.failUnlessReallyEqual(headers["content-range"][0],
782 "bytes %d-%d/%d" % (length-5, length-1, length))
786 def test_HEAD_FILEURL_partial_range_overrun(self):
787 headers = {"range": "bytes=100-200"}
788 d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
789 "416 Requested Range not satisfiable",
791 self.HEAD, self.public_url + "/foo/bar.txt",
795 def test_GET_FILEURL_range_bad(self):
796 headers = {"range": "BOGUS=fizbop-quarnak"}
797 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
798 return_response=True)
799 def _got((res, status, headers)):
800 self.failUnlessReallyEqual(int(status), 200)
801 self.failUnless(not headers.has_key("content-range"))
802 self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
806 def test_HEAD_FILEURL(self):
807 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
808 def _got((res, status, headers)):
809 self.failUnlessReallyEqual(res, "")
810 self.failUnlessReallyEqual(headers["content-length"][0],
811 str(len(self.BAR_CONTENTS)))
812 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
816 def test_GET_FILEURL_named(self):
817 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
818 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
819 d = self.GET(base + "/@@name=/blah.txt")
820 d.addCallback(self.failUnlessIsBarDotTxt)
821 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
822 d.addCallback(self.failUnlessIsBarDotTxt)
823 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
824 d.addCallback(self.failUnlessIsBarDotTxt)
825 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
826 d.addCallback(self.failUnlessIsBarDotTxt)
827 save_url = base + "?save=true&filename=blah.txt"
828 d.addCallback(lambda res: self.GET(save_url))
829 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
830 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
831 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
832 u_url = base + "?save=true&filename=" + u_fn_e
833 d.addCallback(lambda res: self.GET(u_url))
834 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
837 def test_PUT_FILEURL_named_bad(self):
838 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
839 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
841 "/file can only be used with GET or HEAD",
842 self.PUT, base + "/@@name=/blah.txt", "")
846 def test_GET_DIRURL_named_bad(self):
847 base = "/file/%s" % urllib.quote(self._foo_uri)
848 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
851 self.GET, base + "/@@name=/blah.txt")
854 def test_GET_slash_file_bad(self):
855 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
857 "/file must be followed by a file-cap and a name",
861 def test_GET_unhandled_URI_named(self):
862 contents, n, newuri = self.makefile(12)
863 verifier_cap = n.get_verify_cap().to_string()
864 base = "/file/%s" % urllib.quote(verifier_cap)
865 # client.create_node_from_uri() can't handle verify-caps
866 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
867 "400 Bad Request", "is not a file-cap",
871 def test_GET_unhandled_URI(self):
872 contents, n, newuri = self.makefile(12)
873 verifier_cap = n.get_verify_cap().to_string()
874 base = "/uri/%s" % urllib.quote(verifier_cap)
875 # client.create_node_from_uri() can't handle verify-caps
876 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
878 "GET unknown URI type: can only do t=info",
882 def test_GET_FILE_URI(self):
883 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
885 d.addCallback(self.failUnlessIsBarDotTxt)
888 def test_GET_FILE_URI_mdmf(self):
889 base = "/uri/%s" % urllib.quote(self._quux_txt_uri)
891 d.addCallback(self.failUnlessIsQuuxDotTxt)
894 def test_GET_FILE_URI_mdmf_extensions(self):
895 base = "/uri/%s" % urllib.quote("%s:RANDOMSTUFF" % self._quux_txt_uri)
897 d.addCallback(self.failUnlessIsQuuxDotTxt)
900 def test_GET_FILE_URI_mdmf_readonly(self):
901 base = "/uri/%s" % urllib.quote(self._quux_txt_readonly_uri)
903 d.addCallback(self.failUnlessIsQuuxDotTxt)
906 def test_GET_FILE_URI_badchild(self):
907 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
908 errmsg = "Files have no children, certainly not named 'boguschild'"
909 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
910 "400 Bad Request", errmsg,
914 def test_PUT_FILE_URI_badchild(self):
915 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
916 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
917 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
918 "400 Bad Request", errmsg,
# PUT to an MDMF writecap replaces the mutable file's contents in
# place; a subsequent GET must observe the new contents.
922 def test_PUT_FILE_URI_mdmf(self):
923 base = "/uri/%s" % urllib.quote(self._quux_txt_uri)
924 self._quux_new_contents = "new_contents"
926 d.addCallback(lambda res:
927 self.failUnlessIsQuuxDotTxt(res))
928 d.addCallback(lambda ignored:
929 self.PUT(base, self._quux_new_contents))
930 d.addCallback(lambda ignored:
932 d.addCallback(lambda res:
933 self.failUnlessReallyEqual(res, self._quux_new_contents))
# Same as test_PUT_FILE_URI_mdmf, but through a cap carrying an unknown
# ":SUFFIX" extension field, which must be tolerated on write as well.
936 def test_PUT_FILE_URI_mdmf_extensions(self):
937 base = "/uri/%s" % urllib.quote("%s:EXTENSIONSTUFF" % self._quux_txt_uri)
938 self._quux_new_contents = "new_contents"
940 d.addCallback(lambda res: self.failUnlessIsQuuxDotTxt(res))
941 d.addCallback(lambda ignored: self.PUT(base, self._quux_new_contents))
942 d.addCallback(lambda ignored: self.GET(base))
943 d.addCallback(lambda res: self.failUnlessEqual(self._quux_new_contents,
# Writing through a read-only MDMF cap must be refused with 400
# "read-only cap" (the existing comment notes a 500 is returned today).
947 def test_PUT_FILE_URI_mdmf_readonly(self):
948 # We're not allowed to PUT things to a readonly cap.
949 base = "/uri/%s" % self._quux_txt_readonly_uri
951 d.addCallback(lambda res:
952 self.failUnlessIsQuuxDotTxt(res))
953 # What should we get here? We get a 500 error now; that's not right.
954 d.addCallback(lambda ignored:
955 self.shouldFail2(error.Error, "test_PUT_FILE_URI_mdmf_readonly",
956 "400 Bad Request", "read-only cap",
957 self.PUT, base, "new data"))
# SDMF analog of the test above: PUT through a read-only cap fails 400.
960 def test_PUT_FILE_URI_sdmf_readonly(self):
961 # We're not allowed to put things to a readonly cap.
962 base = "/uri/%s" % self._baz_txt_readonly_uri
964 d.addCallback(lambda res:
965 self.failUnlessIsBazDotTxt(res))
966 d.addCallback(lambda ignored:
967 self.shouldFail2(error.Error, "test_PUT_FILE_URI_sdmf_readonly",
968 "400 Bad Request", "read-only cap",
969 self.PUT, base, "new_data"))
972 # TODO: version of this with a Unicode filename
# GET with ?filename=...&save=true must set a Content-Disposition
# "attachment" header naming the file, while still returning its body.
973 def test_GET_FILEURL_save(self):
974 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
975 return_response=True)
976 def _got((res, statuscode, headers)):
977 content_disposition = headers["content-disposition"][0]
978 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
979 self.failUnlessIsBarDotTxt(res)
# GET of a nonexistent child must yield a 404.
983 def test_GET_FILEURL_missing(self):
984 d = self.GET(self.public_url + "/foo/missing")
985 d.addBoth(self.should404, "test_GET_FILEURL_missing")
# t=info on an MDMF writecap shows the format and both the write- and
# read-caps.
988 def test_GET_FILEURL_info_mdmf(self):
989 d = self.GET("/uri/%s?t=info" % self._quux_txt_uri)
991 self.failUnlessIn("mutable file (mdmf)", res)
992 self.failUnlessIn(self._quux_txt_uri, res)
993 self.failUnlessIn(self._quux_txt_readonly_uri, res)
# t=info on a read-only MDMF cap must NOT leak the writecap.
997 def test_GET_FILEURL_info_mdmf_readonly(self):
998 d = self.GET("/uri/%s?t=info" % self._quux_txt_readonly_uri)
1000 self.failUnlessIn("mutable file (mdmf)", res)
1001 self.failIfIn(self._quux_txt_uri, res)
1002 self.failUnlessIn(self._quux_txt_readonly_uri, res)
# t=info on an SDMF cap reports the sdmf format and echoes the cap.
1006 def test_GET_FILEURL_info_sdmf(self):
1007 d = self.GET("/uri/%s?t=info" % self._baz_txt_uri)
1009 self.failUnlessIn("mutable file (sdmf)", res)
1010 self.failUnlessIn(self._baz_txt_uri, res)
# t=info must also work through a cap with an unknown ":SUFFIX" field.
1014 def test_GET_FILEURL_info_mdmf_extensions(self):
1015 d = self.GET("/uri/%s:STUFF?t=info" % self._quux_txt_uri)
1017 self.failUnlessIn("mutable file (mdmf)", res)
1018 self.failUnlessIn(self._quux_txt_uri, res)
1019 self.failUnlessIn(self._quux_txt_readonly_uri, res)
# replace=only-files lets a PUT overwrite an existing FILE child, but
# must refuse (409 Conflict) to overwrite an existing DIRECTORY child.
1023 def test_PUT_overwrite_only_files(self):
1024 # create a directory, put a file in that directory.
1025 contents, n, filecap = self.makefile(8)
1026 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
1027 d.addCallback(lambda res:
1028 self.PUT(self.public_url + "/foo/dir/file1.txt",
1029 self.NEWFILE_CONTENTS))
1030 # try to overwrite the file with replace=only-files
1031 # (this should work)
1032 d.addCallback(lambda res:
1033 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
1035 d.addCallback(lambda res:
1036 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
1037 "There was already a child by that name, and you asked me "
1038 "to not replace it",
1039 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
# PUT of a new file creates an immutable child whose URI and contents
# must match what was uploaded.
1043 def test_PUT_NEWFILEURL(self):
1044 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
1045 # TODO: we lose the response code, so we can't check this
1046 #self.failUnlessReallyEqual(responsecode, 201)
1047 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
1048 d.addCallback(lambda res:
1049 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
1050 self.NEWFILE_CONTENTS))
# PUT with explicit mutable=false behaves like the default immutable PUT.
1053 def test_PUT_NEWFILEURL_not_mutable(self):
1054 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
1055 self.NEWFILE_CONTENTS)
1056 # TODO: we lose the response code, so we can't check this
1057 #self.failUnlessReallyEqual(responsecode, 201)
1058 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
1059 d.addCallback(lambda res:
1060 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
1061 self.NEWFILE_CONTENTS))
# Unlinked PUT with format=mdmf must return an MDMF writecap, and the
# file's t=json must report the mdmf format. Contents are enlarged to
# span multiple MDMF segments.
1064 def test_PUT_NEWFILEURL_unlinked_mdmf(self):
1065 # this should get us a few segments of an MDMF mutable file,
1066 # which we can then test for.
1067 contents = self.NEWFILE_CONTENTS * 300000
1068 d = self.PUT("/uri?format=mdmf",
1070 def _got_filecap(filecap):
1071 self.failUnless(filecap.startswith("URI:MDMF"))
1073 d.addCallback(_got_filecap)
1074 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1075 d.addCallback(lambda json: self.failUnlessIn("mdmf", json))
# Unlinked PUT with format=sdmf must yield a file whose t=json reports
# the sdmf format.
1078 def test_PUT_NEWFILEURL_unlinked_sdmf(self):
1079 contents = self.NEWFILE_CONTENTS * 300000
1080 d = self.PUT("/uri?format=sdmf",
1082 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1083 d.addCallback(lambda json: self.failUnlessIn("sdmf", json))
# An unrecognized format= value must be rejected with 400 Bad Request.
1086 def test_PUT_NEWFILEURL_unlinked_bad_format(self):
1087 contents = self.NEWFILE_CONTENTS * 300000
1088 return self.shouldHTTPError("PUT_NEWFILEURL_unlinked_bad_format",
1089 400, "Bad Request", "Unknown format: foo",
1090 self.PUT, "/uri?format=foo",
# A Content-Range header on PUT is unsupported: the server must answer
# 501 Not Implemented and must NOT create the child.
1093 def test_PUT_NEWFILEURL_range_bad(self):
1094 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
1095 target = self.public_url + "/foo/new.txt"
1096 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
1097 "501 Not Implemented",
1098 "Content-Range in PUT not yet supported",
1099 # (and certainly not for immutable files)
1100 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
1102 d.addCallback(lambda res:
1103 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
# PUT with mutable=true creates a mutable child; the returned cap must
# parse as a writable (non-readonly) mutable filenode URI, and the
# child's contents must match what was written.
1106 def test_PUT_NEWFILEURL_mutable(self):
1107 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
1108 self.NEWFILE_CONTENTS)
1109 # TODO: we lose the response code, so we can't check this
1110 #self.failUnlessReallyEqual(responsecode, 201)
1111 def _check_uri(res):
1112 u = uri.from_string_mutable_filenode(res)
1113 self.failUnless(u.is_mutable())
1114 self.failIf(u.is_readonly())
1116 d.addCallback(_check_uri)
1117 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
1118 d.addCallback(lambda res:
1119 self.failUnlessMutableChildContentsAre(self._foo_node,
1121 self.NEWFILE_CONTENTS))
# Mutable files no longer have a size limit, so uploading one byte past
# the old MUTABLE_SIZELIMIT must succeed.
1124 def test_PUT_NEWFILEURL_mutable_toobig(self):
1125 # It is okay to upload large mutable files, so we should be able
1127 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
1128 "b" * (self.s.MUTABLE_SIZELIMIT + 1))
# PUT over an existing child (default replace=true) overwrites it.
1131 def test_PUT_NEWFILEURL_replace(self):
1132 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
1133 # TODO: we lose the response code, so we can't check this
1134 #self.failUnlessReallyEqual(responsecode, 200)
1135 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
1136 d.addCallback(lambda res:
1137 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
1138 self.NEWFILE_CONTENTS))
# PUT to a file path with an unrecognized t= value must fail with 400.
1141 def test_PUT_NEWFILEURL_bad_t(self):
1142 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
1143 "PUT to a file: bad t=bogus",
1144 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
# replace=false on an existing child must be refused rather than
# overwrite (the expected status line is on an elided original line).
1148 def test_PUT_NEWFILEURL_no_replace(self):
1149 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
1150 self.NEWFILE_CONTENTS)
1151 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
1153 "There was already a child by that name, and you asked me "
1154 "to not replace it")
# PUT through a not-yet-existing intermediate directory must create the
# intermediate ("newdir") and place the file inside it, not at the top.
1157 def test_PUT_NEWFILEURL_mkdirs(self):
1158 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
1160 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
1161 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
1162 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
1163 d.addCallback(lambda res:
1164 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
1165 self.NEWFILE_CONTENTS))
# If a FILE occupies an intermediate path component, the PUT must fail
# rather than implicitly replace it with a directory.
1168 def test_PUT_NEWFILEURL_blocked(self):
1169 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
1170 self.NEWFILE_CONTENTS)
1171 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
1173 "Unable to create directory 'blockingfile': a file was in the way")
# A double slash (empty pathname component) in the URL must be rejected.
1176 def test_PUT_NEWFILEURL_emptyname(self):
1177 # an empty pathname component (i.e. a double-slash) is disallowed
1178 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
1180 "The webapi does not allow empty pathname components",
1181 self.PUT, self.public_url + "/foo//new.txt", "")
# DELETE of a file URL unlinks the child from its parent directory.
1184 def test_DELETE_FILEURL(self):
1185 d = self.DELETE(self.public_url + "/foo/bar.txt")
1186 d.addCallback(lambda res:
1187 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
# DELETE of a nonexistent child yields 404.
1190 def test_DELETE_FILEURL_missing(self):
1191 d = self.DELETE(self.public_url + "/foo/missing")
1192 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
# DELETE through a nonexistent intermediate directory also yields 404.
1195 def test_DELETE_FILEURL_missing2(self):
1196 d = self.DELETE(self.public_url + "/missing/missing")
1197 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
# Helper: assert that a t=json response for bar.txt carries the tahoe
# link-metadata (linkcrtime/linkmotime) and that linkcrtime matches the
# value recorded at setup time in self._bar_txt_metadata.
1200 def failUnlessHasBarDotTxtMetadata(self, res):
1201 data = simplejson.loads(res)
1202 self.failUnless(isinstance(data, list))
1203 self.failUnlessIn("metadata", data[1])
1204 self.failUnlessIn("tahoe", data[1]["metadata"])
1205 self.failUnlessIn("linkcrtime", data[1]["metadata"]["tahoe"])
1206 self.failUnlessIn("linkmotime", data[1]["metadata"]["tahoe"])
1207 self.failUnlessReallyEqual(data[1]["metadata"]["tahoe"]["linkcrtime"],
1208 self._bar_txt_metadata["tahoe"]["linkcrtime"])
# t=json on a file returns the JSON description plus link-metadata.
1210 def test_GET_FILEURL_json(self):
1211 # twisted.web.http.parse_qs ignores any query args without an '=', so
1212 # I can't do "GET /path?json", I have to do "GET /path/t=json"
1213 # instead. This may make it tricky to emulate the S3 interface
1215 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
1217 self.failUnlessIsBarJSON(data)
1218 self.failUnlessHasBarDotTxtMetadata(data)
1220 d.addCallback(_check1)
# t=json must include a "format" field naming the mutable-file version
# (mdmf vs sdmf) for files created with the matching format= argument.
1223 def test_GET_FILEURL_json_mutable_type(self):
1224 # The JSON should include format, which says whether the
1225 # file is SDMF or MDMF
1226 d = self.PUT("/uri?format=mdmf",
1227 self.NEWFILE_CONTENTS * 300000)
1228 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1229 def _got_json(json, version):
1230 data = simplejson.loads(json)
1231 assert "filenode" == data[0]
# NOTE(review): this excerpt elides some original lines; data is
# presumably rebound to the metadata dict (data[1]) before the next
# assert — confirm against the full file.
1233 assert isinstance(data, dict)
1235 self.failUnlessIn("format", data)
1236 self.failUnlessEqual(data["format"], version)
1238 d.addCallback(_got_json, "mdmf")
1239 # Now make an SDMF file and check that it is reported correctly.
1240 d.addCallback(lambda ignored:
1241 self.PUT("/uri?format=sdmf",
1242 self.NEWFILE_CONTENTS * 300000))
1243 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1244 d.addCallback(_got_json, "sdmf")
# t=json on the pre-made MDMF file (quux) matches the expected JSON.
1247 def test_GET_FILEURL_json_mdmf(self):
1248 d = self.GET("/uri/%s?t=json" % urllib.quote(self._quux_txt_uri))
1249 d.addCallback(self.failUnlessIsQuuxJSON)
# t=json (here spelled ?json, which is equivalent for a missing node)
# on a nonexistent child yields 404.
1252 def test_GET_FILEURL_json_missing(self):
1253 d = self.GET(self.public_url + "/foo/missing?json")
1254 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
# t=uri and t=readonly-uri both return the file's cap; for immutable
# files the two are identical.
1257 def test_GET_FILEURL_uri(self):
1258 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
1260 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1261 d.addCallback(_check)
1262 d.addCallback(lambda res:
1263 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
1265 # for now, for files, uris and readonly-uris are the same
1266 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1267 d.addCallback(_check2)
# GET with an unrecognized t= value on a file must yield 400.
1270 def test_GET_FILEURL_badtype(self):
1271 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
1274 self.public_url + "/foo/bar.txt?t=bogus")
# The /tahoe_css stylesheet is served and contains the expected
# toolbar style rules.
1277 def test_CSS_FILE(self):
1278 d = self.GET("/tahoe_css", followRedirect=True)
1280 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
1281 self.failUnless(CSS_STYLE.search(res), res)
1282 d.addCallback(_check)
# t=uri on a nonexistent child yields 404.
1285 def test_GET_FILEURL_uri_missing(self):
1286 d = self.GET(self.public_url + "/foo/missing?t=uri")
1287 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
# Helper: assert that a directory page's HTML contains the upload form
# (with chk/sdmf/mdmf radio buttons, chk pre-checked) and the mkdir
# form (with sdmf/mdmf radio buttons, sdmf pre-checked).
1290 def _check_upload_and_mkdir_forms(self, html):
1291 # We should have a form to create a file, with radio buttons that allow
1292 # the user to toggle whether it is a CHK/LIT (default), SDMF, or MDMF file.
1293 self.failUnlessIn('name="t" value="upload"', html)
1294 self.failUnlessIn('input checked="checked" type="radio" id="upload-chk" value="chk" name="format"', html)
1295 self.failUnlessIn('input type="radio" id="upload-sdmf" value="sdmf" name="format"', html)
1296 self.failUnlessIn('input type="radio" id="upload-mdmf" value="mdmf" name="format"', html)
1298 # We should also have the ability to create a mutable directory, with
1299 # radio buttons that allow the user to toggle whether it is an SDMF (default)
1300 # or MDMF directory.
1301 self.failUnlessIn('name="t" value="mkdir"', html)
1302 self.failUnlessIn('input checked="checked" type="radio" id="mkdir-sdmf" value="sdmf" name="format"', html)
1303 self.failUnlessIn('input type="radio" id="mkdir-mdmf" value="mdmf" name="format"', html)
# A directory page shows the Return-to-Welcome link, both forms
# (checked by the helper above), and lists its children.
1305 def test_GET_DIRECTORY_html(self):
1306 d = self.GET(self.public_url + "/foo", followRedirect=True)
1308 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>', html)
1309 self._check_upload_and_mkdir_forms(html)
1310 self.failUnlessIn("quux", html)
1311 d.addCallback(_check)
# The Welcome (root) page also carries the upload and mkdir forms.
1314 def test_GET_root_html(self):
1316 d.addCallback(self._check_upload_and_mkdir_forms)
# Exercises the HTML rendering of directory listings: relative links
# back to the Welcome page, /file/ links for FILE children, relative
# form actions for unlink/rename buttons (unlink must POST), DIR and
# DIR-RO child rows, the read-only banner, the empty-directory page
# with its mkdir form, and a LIT (literal) directory.
1319 def test_GET_DIRURL(self):
1320 # the addSlash means we get a redirect here
1321 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
1323 d = self.GET(self.public_url + "/foo", followRedirect=True)
1325 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
1327 # the FILE reference points to a URI, but it should end in bar.txt
1328 bar_url = ("%s/file/%s/@@named=/bar.txt" %
1329 (ROOT, urllib.quote(self._bar_txt_uri)))
1330 get_bar = "".join([r'<td>FILE</td>',
1332 r'<a href="%s">bar.txt</a>' % bar_url,
1334 r'\s+<td align="right">%d</td>' % len(self.BAR_CONTENTS),
1336 self.failUnless(re.search(get_bar, res), res)
1337 for label in ['unlink', 'rename']:
1338 for line in res.split("\n"):
1339 # find the line that contains the relevant button for bar.txt
1340 if ("form action" in line and
1341 ('value="%s"' % (label,)) in line and
1342 'value="bar.txt"' in line):
1343 # the form target should use a relative URL
1344 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1345 self.failUnlessIn('action="%s"' % foo_url, line)
1346 # and the when_done= should too
1347 #done_url = urllib.quote(???)
1348 #self.failUnlessIn('name="when_done" value="%s"' % done_url, line)
1350 # 'unlink' needs to use POST because it directly has a side effect
1351 if label == 'unlink':
1352 self.failUnlessIn('method="post"', line)
1355 self.fail("unable to find '%s bar.txt' line" % (label,), res)
1357 # the DIR reference just points to a URI
1358 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1359 get_sub = ((r'<td>DIR</td>')
1360 +r'\s+<td><a href="%s">sub</a></td>' % sub_url
1361 self.failUnless(re.search(get_sub, res), res)
1362 d.addCallback(_check)
1364 # look at a readonly directory
1365 d.addCallback(lambda res:
1366 self.GET(self.public_url + "/reedownlee", followRedirect=True))
1368 self.failUnless("(read-only)" in res, res)
1369 self.failIf("Upload a file" in res, res)
1370 d.addCallback(_check2)
1372 # and at a directory that contains a readonly directory
1373 d.addCallback(lambda res:
1374 self.GET(self.public_url, followRedirect=True))
1376 self.failUnless(re.search('<td>DIR-RO</td>'
1377 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1378 d.addCallback(_check3)
1380 # and an empty directory
1381 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1383 self.failUnless("directory is empty" in res, res)
1384 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1385 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1386 d.addCallback(_check4)
1388 # and at a literal directory
1389 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1390 d.addCallback(lambda res:
1391 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1393 self.failUnless('(immutable)' in res, res)
1394 self.failUnless(re.search('<td>FILE</td>'
1395 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1396 d.addCallback(_check5)
# GET with an unrecognized t= value on a directory must fail (the
# expected status arguments are on elided original lines).
1399 def test_GET_DIRURL_badtype(self):
1400 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1404 self.public_url + "/foo?t=bogus")
# t=json on a directory returns the expected dirnode JSON.
1407 def test_GET_DIRURL_json(self):
1408 d = self.GET(self.public_url + "/foo?t=json")
1409 d.addCallback(self.failUnlessIsFooJSON)
# A directory's t=json must report each mutable child's "format"
# (mdmf / sdmf) matching the format it was created with.
1412 def test_GET_DIRURL_json_format(self):
1413 d = self.PUT(self.public_url + \
1414 "/foo/sdmf.txt?format=sdmf",
1415 self.NEWFILE_CONTENTS * 300000)
1416 d.addCallback(lambda ignored:
1417 self.PUT(self.public_url + \
1418 "/foo/mdmf.txt?format=mdmf",
1419 self.NEWFILE_CONTENTS * 300000))
1420 # Now we have an MDMF and SDMF file in the directory. If we GET
1421 # its JSON, we should see their encodings.
1422 d.addCallback(lambda ignored:
1423 self.GET(self.public_url + "/foo?t=json"))
1424 def _got_json(json):
1425 data = simplejson.loads(json)
1426 assert data[0] == "dirnode"
1429 kids = data['children']
1431 mdmf_data = kids['mdmf.txt'][1]
1432 self.failUnlessIn("format", mdmf_data)
1433 self.failUnlessEqual(mdmf_data["format"], "mdmf")
1435 sdmf_data = kids['sdmf.txt'][1]
1436 self.failUnlessIn("format", sdmf_data)
1437 self.failUnlessEqual(sdmf_data["format"], "sdmf")
1438 d.addCallback(_got_json)
# t=start-manifest is a slow operation and must be refused when no
# ophandle= is supplied.
1442 def test_POST_DIRURL_manifest_no_ophandle(self):
1443 d = self.shouldFail2(error.Error,
1444 "test_POST_DIRURL_manifest_no_ophandle",
1446 "slow operation requires ophandle=",
1447 self.POST, self.public_url, t="start-manifest")
# Full t=start-manifest cycle: start the operation under an ophandle,
# wait for completion, then fetch the results in each output form —
# default (HTML), explicit html, text, and JSON — and check each
# representation lists the sub directory and sub/baz.txt.
1450 def test_POST_DIRURL_manifest(self):
1451 d = defer.succeed(None)
1452 def getman(ignored, output):
1453 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1454 followRedirect=True)
1455 d.addCallback(self.wait_for_operation, "125")
1456 d.addCallback(self.get_operation_results, "125", output)
1458 d.addCallback(getman, None)
1459 def _got_html(manifest):
1460 self.failUnless("Manifest of SI=" in manifest)
1461 self.failUnless("<td>sub</td>" in manifest)
1462 self.failUnless(self._sub_uri in manifest)
1463 self.failUnless("<td>sub/baz.txt</td>" in manifest)
1464 d.addCallback(_got_html)
1466 # both t=status and unadorned GET should be identical
1467 d.addCallback(lambda res: self.GET("/operations/125"))
1468 d.addCallback(_got_html)
1470 d.addCallback(getman, "html")
1471 d.addCallback(_got_html)
1472 d.addCallback(getman, "text")
1473 def _got_text(manifest):
1474 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
1475 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
1476 d.addCallback(_got_text)
1477 d.addCallback(getman, "JSON")
1479 data = res["manifest"]
1481 for (path_list, cap) in data:
1482 got[tuple(path_list)] = cap
1483 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1484 self.failUnless((u"sub",u"baz.txt") in got)
1485 self.failUnless("finished" in res)
1486 self.failUnless("origin" in res)
1487 self.failUnless("storage-index" in res)
1488 self.failUnless("verifycaps" in res)
1489 self.failUnless("stats" in res)
1490 d.addCallback(_got_json)
# t=start-deep-size also requires an ophandle=.
1493 def test_POST_DIRURL_deepsize_no_ophandle(self):
1494 d = self.shouldFail2(error.Error,
1495 "test_POST_DIRURL_deepsize_no_ophandle",
1497 "slow operation requires ophandle=",
1498 self.POST, self.public_url, t="start-deep-size")
# t=start-deep-size: run to completion, then check both the JSON and
# the text ("size: NNN") result forms report a plausible (>1000) size.
1501 def test_POST_DIRURL_deepsize(self):
1502 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1503 followRedirect=True)
1504 d.addCallback(self.wait_for_operation, "126")
1505 d.addCallback(self.get_operation_results, "126", "json")
1506 def _got_json(data):
1507 self.failUnlessReallyEqual(data["finished"], True)
1509 self.failUnless(size > 1000)
1510 d.addCallback(_got_json)
1511 d.addCallback(self.get_operation_results, "126", "text")
1513 mo = re.search(r'^size: (\d+)$', res, re.M)
1514 self.failUnless(mo, res)
1515 size = int(mo.group(1))
1516 # with directories, the size varies.
1517 self.failUnless(size > 1000)
1518 d.addCallback(_got_text)
# t=start-deep-stats also requires an ophandle=.
1521 def test_POST_DIRURL_deepstats_no_ophandle(self):
1522 d = self.shouldFail2(error.Error,
1523 "test_POST_DIRURL_deepstats_no_ophandle",
1525 "slow operation requires ophandle=",
1526 self.POST, self.public_url, t="start-deep-stats")
# t=start-deep-stats: the finished operation's JSON stats must match
# the known contents of the fixture tree (counts and sizes); entries
# that vary from run to run (directory sizes) are deliberately skipped.
1529 def test_POST_DIRURL_deepstats(self):
1530 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1531 followRedirect=True)
1532 d.addCallback(self.wait_for_operation, "127")
1533 d.addCallback(self.get_operation_results, "127", "json")
1534 def _got_json(stats):
1535 expected = {"count-immutable-files": 3,
1536 "count-mutable-files": 2,
1537 "count-literal-files": 0,
1539 "count-directories": 3,
1540 "size-immutable-files": 57,
1541 "size-literal-files": 0,
1542 #"size-directories": 1912, # varies
1543 #"largest-directory": 1590,
1544 "largest-directory-children": 7,
1545 "largest-immutable-file": 19,
1547 for k,v in expected.iteritems():
1548 self.failUnlessReallyEqual(stats[k], v,
1549 "stats[%s] was %s, not %s" %
1551 self.failUnlessReallyEqual(stats["size-files-histogram"],
1553 d.addCallback(_got_json)
# t=stream-manifest returns newline-separated JSON units: one per node
# (9 total for the fixture tree) ending with a "stats" unit; the first
# unit is the root directory and each file unit carries cap,
# storage-index, verifycap and repaircap.
1556 def test_POST_DIRURL_stream_manifest(self):
1557 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1559 self.failUnless(res.endswith("\n"))
1560 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1561 self.failUnlessReallyEqual(len(units), 9)
1562 self.failUnlessEqual(units[-1]["type"], "stats")
1564 self.failUnlessEqual(first["path"], [])
1565 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1566 self.failUnlessEqual(first["type"], "directory")
1567 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1568 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1569 self.failIfEqual(baz["storage-index"], None)
1570 self.failIfEqual(baz["verifycap"], None)
1571 self.failIfEqual(baz["repaircap"], None)
1572 # XXX: Add quux and baz to this test.
1574 d.addCallback(_check)
# t=uri on a directory returns its write-cap.
1577 def test_GET_DIRURL_uri(self):
1578 d = self.GET(self.public_url + "/foo?t=uri")
1580 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1581 d.addCallback(_check)
# t=readonly-uri on a directory returns its read-only cap.
1584 def test_GET_DIRURL_readonly_uri(self):
1585 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1587 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1588 d.addCallback(_check)
# PUT with t=mkdir creates an empty directory child.
1591 def test_PUT_NEWDIRURL(self):
1592 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1593 d.addCallback(lambda res:
1594 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1595 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1596 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir&format=mdmf must create a directory backed by an MDMF node.
1599 def test_PUT_NEWDIRURL_mdmf(self):
1600 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir&format=mdmf", "")
1601 d.addCallback(lambda res:
1602 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1603 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1604 d.addCallback(lambda node:
1605 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
# t=mkdir&format=sdmf must create a directory backed by an SDMF node.
1608 def test_PUT_NEWDIRURL_sdmf(self):
1609 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir&format=sdmf",
1611 d.addCallback(lambda res:
1612 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1613 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1614 d.addCallback(lambda node:
1615 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
# An unrecognized format= on t=mkdir must be rejected with 400.
1618 def test_PUT_NEWDIRURL_bad_format(self):
1619 return self.shouldHTTPError("PUT_NEWDIRURL_bad_format",
1620 400, "Bad Request", "Unknown format: foo",
1621 self.PUT, self.public_url +
1622 "/foo/newdir=?t=mkdir&format=foo", "")
# POST with t=mkdir creates an empty directory child, same as PUT.
1624 def test_POST_NEWDIRURL(self):
1625 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1626 d.addCallback(lambda res:
1627 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1628 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1629 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST t=mkdir&format=mdmf creates an MDMF-backed directory.
1632 def test_POST_NEWDIRURL_mdmf(self):
1633 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir&format=mdmf", "")
1634 d.addCallback(lambda res:
1635 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1636 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1637 d.addCallback(lambda node:
1638 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
# POST t=mkdir&format=sdmf creates an SDMF-backed directory.
1641 def test_POST_NEWDIRURL_sdmf(self):
1642 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir&format=sdmf", "")
1643 d.addCallback(lambda res:
1644 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1645 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1646 d.addCallback(lambda node:
1647 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
# POST t=mkdir with an unrecognized format= must be rejected with 400.
1650 def test_POST_NEWDIRURL_bad_format(self):
1651 return self.shouldHTTPError("POST_NEWDIRURL_bad_format",
1652 400, "Bad Request", "Unknown format: foo",
1653 self.POST2, self.public_url + \
1654 "/foo/newdir?t=mkdir&format=foo", "")
# A double slash (empty pathname component) in a mkdir URL is refused.
1656 def test_POST_NEWDIRURL_emptyname(self):
1657 # an empty pathname component (i.e. a double-slash) is disallowed
1658 d = self.shouldFail2(error.Error, "POST_NEWDIRURL_emptyname",
1660 "The webapi does not allow empty pathname components, i.e. a double slash",
1661 self.POST, self.public_url + "//?t=mkdir")
# Shared driver for the t=mkdir-with-children tests: create a directory
# pre-populated from a JSON children spec (optionally forcing mdmf or
# sdmf; defaulting to sdmf), then verify the new node's version and
# that every kind of child — immutable, mutable rw/ro, unknown caps,
# sub-directories (rw, literal, empty) — round-tripped with the right
# read/write authority.
1664 def _do_POST_NEWDIRURL_initial_children_test(self, version=None):
1665 (newkids, caps) = self._create_initial_children()
1666 query = "/foo/newdir?t=mkdir-with-children"
1667 if version == MDMF_VERSION:
1668 query += "&format=mdmf"
1669 elif version == SDMF_VERSION:
1670 query += "&format=sdmf"
1672 version = SDMF_VERSION # for later
1673 d = self.POST2(self.public_url + query,
1674 simplejson.dumps(newkids))
1676 n = self.s.create_node_from_uri(uri.strip())
1677 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1678 self.failUnlessEqual(n._node.get_version(), version)
1679 d2.addCallback(lambda ign:
1680 self.failUnlessROChildURIIs(n, u"child-imm",
1682 d2.addCallback(lambda ign:
1683 self.failUnlessRWChildURIIs(n, u"child-mutable",
1685 d2.addCallback(lambda ign:
1686 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1688 d2.addCallback(lambda ign:
1689 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1690 caps['unknown_rocap']))
1691 d2.addCallback(lambda ign:
1692 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1693 caps['unknown_rwcap']))
1694 d2.addCallback(lambda ign:
1695 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1696 caps['unknown_immcap']))
1697 d2.addCallback(lambda ign:
1698 self.failUnlessRWChildURIIs(n, u"dirchild",
1700 d2.addCallback(lambda ign:
1701 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1703 d2.addCallback(lambda ign:
1704 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1705 caps['emptydircap']))
1707 d.addCallback(_check)
1708 d.addCallback(lambda res:
1709 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1710 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1711 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1712 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1713 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# Default-format (SDMF) run of the mkdir-with-children driver.
1716 def test_POST_NEWDIRURL_initial_children(self):
1717 return self._do_POST_NEWDIRURL_initial_children_test()
# MDMF run of the mkdir-with-children driver.
1719 def test_POST_NEWDIRURL_initial_children_mdmf(self):
1720 return self._do_POST_NEWDIRURL_initial_children_test(MDMF_VERSION)
# Explicit-SDMF run of the mkdir-with-children driver.
1722 def test_POST_NEWDIRURL_initial_children_sdmf(self):
1723 return self._do_POST_NEWDIRURL_initial_children_test(SDMF_VERSION)
# t=mkdir-with-children with an unrecognized format= must fail 400.
1725 def test_POST_NEWDIRURL_initial_children_bad_format(self):
1726 (newkids, caps) = self._create_initial_children()
1727 return self.shouldHTTPError("POST_NEWDIRURL_initial_children_bad_format",
1728 400, "Bad Request", "Unknown format: foo",
1729 self.POST2, self.public_url + \
1730 "/foo/newdir?t=mkdir-with-children&format=foo",
1731 simplejson.dumps(newkids))
# t=mkdir-immutable with all-immutable children: the resulting deep-
# immutable directory must contain each child (immutable file, unknown
# immutable cap, immutable/literal/empty subdirectories) as read-only,
# both when read back via the returned cap and via the parent link.
1733 def test_POST_NEWDIRURL_immutable(self):
1734 (newkids, caps) = self._create_immutable_children()
1735 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1736 simplejson.dumps(newkids))
1738 n = self.s.create_node_from_uri(uri.strip())
1739 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1740 d2.addCallback(lambda ign:
1741 self.failUnlessROChildURIIs(n, u"child-imm",
1743 d2.addCallback(lambda ign:
1744 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1745 caps['unknown_immcap']))
1746 d2.addCallback(lambda ign:
1747 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1749 d2.addCallback(lambda ign:
1750 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1752 d2.addCallback(lambda ign:
1753 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1754 caps['emptydircap']))
1756 d.addCallback(_check)
1757 d.addCallback(lambda res:
1758 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1759 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1760 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1761 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1762 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1763 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1764 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1765 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1766 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1767 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1768 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1769 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1770 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1771 d.addErrback(self.explain_web_error)
# t=mkdir-immutable must refuse a children spec that includes mutable
# caps ("needed to be immutable but was not").
1774 def test_POST_NEWDIRURL_immutable_bad(self):
1775 (newkids, caps) = self._create_initial_children()
1776 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1778 "needed to be immutable but was not",
1780 self.public_url + "/foo/newdir?t=mkdir-immutable",
1781 simplejson.dumps(newkids))
# t=mkdir on an already-existing directory is a no-op: the existing
# directory (and its baz.txt child) must survive.
1784 def test_PUT_NEWDIRURL_exists(self):
1785 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1786 d.addCallback(lambda res:
1787 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1788 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1789 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir through a path where a FILE occupies an intermediate
# component must fail with 409 Conflict and leave the tree untouched.
1792 def test_PUT_NEWDIRURL_blocked(self):
1793 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1794 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1796 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1797 d.addCallback(lambda res:
1798 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1799 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1800 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p&path=/a/b creates intermediate directories as needed and
# returns the leaf's URI; repeating it must be idempotent (same URI).
1803 def test_PUT_NEWDIRURL_mkdir_p(self):
1804 d = defer.succeed(None)
1805 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1806 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1807 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1808 def mkdir_p(mkpnode):
1809 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1811 def made_subsub(ssuri):
1812 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1813 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
1815 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1817 d.addCallback(made_subsub)
1819 d.addCallback(mkdir_p)
# PUT t=mkdir on a deep path creates intermediate directories: 'newdir'
# lands under 'subdir' (not directly under foo) and starts out empty.
1822 def test_PUT_NEWDIRURL_mkdirs(self):
1823 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1824 d.addCallback(lambda res:
1825 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1826 d.addCallback(lambda res:
1827 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1828 d.addCallback(lambda res:
1829 self._foo_node.get_child_at_path(u"subdir/newdir"))
1830 d.addCallback(self.failUnlessNodeKeysAre, [])
# Same as test_PUT_NEWDIRURL_mkdirs but with format=mdmf: the leaf
# 'newdir' must be an MDMF directory. (Whether the *intermediate*
# directory should also be MDMF is left open — see the XXX below.)
1833 def test_PUT_NEWDIRURL_mkdirs_mdmf(self):
1834 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir&format=mdmf", "")
1835 d.addCallback(lambda ignored:
1836 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1837 d.addCallback(lambda ignored:
1838 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1839 d.addCallback(lambda ignored:
1840 self._foo_node.get_child_at_path(u"subdir"))
1841 def _got_subdir(subdir):
1842 # XXX: What we want?
1843 #self.failUnlessEqual(subdir._node.get_version(), MDMF_VERSION)
1844 self.failUnlessNodeHasChild(subdir, u"newdir")
1845 return subdir.get_child_at_path(u"newdir")
1846 d.addCallback(_got_subdir)
1847 d.addCallback(lambda newdir:
1848 self.failUnlessEqual(newdir._node.get_version(), MDMF_VERSION))
# Same as the mdmf variant above, but format=sdmf: the created leaf
# directory must report SDMF_VERSION.
1851 def test_PUT_NEWDIRURL_mkdirs_sdmf(self):
1852 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir&format=sdmf", "")
1853 d.addCallback(lambda ignored:
1854 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1855 d.addCallback(lambda ignored:
1856 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1857 d.addCallback(lambda ignored:
1858 self._foo_node.get_child_at_path(u"subdir"))
1859 def _got_subdir(subdir):
1860 # XXX: What we want?
1861 #self.failUnlessEqual(subdir._node.get_version(), MDMF_VERSION)
1862 self.failUnlessNodeHasChild(subdir, u"newdir")
1863 return subdir.get_child_at_path(u"newdir")
1864 d.addCallback(_got_subdir)
1865 d.addCallback(lambda newdir:
1866 self.failUnlessEqual(newdir._node.get_version(), SDMF_VERSION))
# An unrecognized format= value on t=mkdir must produce 400 Bad Request.
# NOTE(review): the final argument line (the PUT body) is missing here.
1869 def test_PUT_NEWDIRURL_mkdirs_bad_format(self):
1870 return self.shouldHTTPError("PUT_NEWDIRURL_mkdirs_bad_format",
1871 400, "Bad Request", "Unknown format: foo",
1872 self.PUT, self.public_url + \
1873 "/foo/subdir/newdir?t=mkdir&format=foo",
# DELETE on a directory URL unlinks it from its parent.
1876 def test_DELETE_DIRURL(self):
1877 d = self.DELETE(self.public_url + "/foo")
1878 d.addCallback(lambda res:
1879 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent child 404s, and the parent is left intact.
1882 def test_DELETE_DIRURL_missing(self):
1883 d = self.DELETE(self.public_url + "/foo/missing")
1884 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1885 d.addCallback(lambda res:
1886 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent top-level name also 404s.
1889 def test_DELETE_DIRURL_missing2(self):
1890 d = self.DELETE(self.public_url + "/missing")
1891 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debug helper: walk the whole public_root tree with a visitor.
# NOTE(review): the visitor body and the print/log lines are missing
# from this dump, so 'visitor' appears empty here.
1894 def dump_root(self):
1896 w = webish.DirnodeWalkerMixin()
1897 def visitor(childpath, childnode, metadata):
1899 d = w.walk(self.public_root, visitor)
# Assert that a dirnode's children are exactly expected_keys (order
# ignored). NOTE(review): the line binding d — presumably d = node.list()
# — and the trailing "return d" are missing from this dump.
1902 def failUnlessNodeKeysAre(self, node, expected_keys):
1903 for k in expected_keys:
1904 assert isinstance(k, unicode)
1906 def _check(children):
1907 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1908 d.addCallback(_check)
# Assert that the given dirnode has a child with the given (unicode)
# name. NOTE(review): the d = node.list() binding is missing here.
1910 def failUnlessNodeHasChild(self, node, name):
1911 assert isinstance(name, unicode)
1913 def _check(children):
1914 self.failUnless(name in children)
1915 d.addCallback(_check)
# Inverse of failUnlessNodeHasChild: assert the child is absent.
# NOTE(review): the d = node.list() binding is missing here.
1917 def failIfNodeHasChild(self, node, name):
1918 assert isinstance(name, unicode)
1920 def _check(children):
1921 self.failIf(name in children)
1922 d.addCallback(_check)
# Download the (immutable) child at 'name' and assert its bytes equal
# expected_contents.
1925 def failUnlessChildContentsAre(self, node, name, expected_contents):
1926 assert isinstance(name, unicode)
1927 d = node.get_child_at_path(name)
1928 d.addCallback(lambda node: download_to_data(node))
1929 def _check(contents):
1930 self.failUnlessReallyEqual(contents, expected_contents)
1931 d.addCallback(_check)
# Like failUnlessChildContentsAre but for mutable children: fetch the
# best recoverable version and compare.
1934 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1935 assert isinstance(name, unicode)
1936 d = node.get_child_at_path(name)
1937 d.addCallback(lambda node: node.download_best_version())
1938 def _check(contents):
1939 self.failUnlessReallyEqual(contents, expected_contents)
1940 d.addCallback(_check)
# Assert that the child at 'name' is writable (or unknown) and that its
# full/write URIs equal expected_uri, with a matching read-only form.
# NOTE(review): the "def _check(child):" line itself is missing here.
1943 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1944 assert isinstance(name, unicode)
1945 d = node.get_child_at_path(name)
1947 self.failUnless(child.is_unknown() or not child.is_readonly())
1948 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1949 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1950 expected_ro_uri = self._make_readonly(expected_uri)
1952 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1953 d.addCallback(_check)
# Assert that the child at 'name' is read-only (or unknown), has no
# write URI, and both its URI and read-only URI equal expected_uri.
# NOTE(review): the "def _check(child):" line itself is missing here.
1956 def failUnlessROChildURIIs(self, node, name, expected_uri):
1957 assert isinstance(name, unicode)
1958 d = node.get_child_at_path(name)
1960 self.failUnless(child.is_unknown() or child.is_readonly())
1961 self.failUnlessReallyEqual(child.get_write_uri(), None)
1962 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1963 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1964 d.addCallback(_check)
# Converse of failUnlessRWChildURIIs: take a URI returned by the web-API
# (got_uri) and assert the writable child at 'name' matches it.
# NOTE(review): the "def _check(child):" line itself is missing here.
1967 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1968 assert isinstance(name, unicode)
1969 d = node.get_child_at_path(name)
1971 self.failUnless(child.is_unknown() or not child.is_readonly())
1972 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1973 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1974 expected_ro_uri = self._make_readonly(got_uri)
1976 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1977 d.addCallback(_check)
# Converse of failUnlessROChildURIIs: assert the read-only child at
# 'name' matches a URI returned by the web-API (got_uri).
# NOTE(review): the "def _check(child):" line itself is missing here.
1980 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1981 assert isinstance(name, unicode)
1982 d = node.get_child_at_path(name)
1984 self.failUnless(child.is_unknown() or child.is_readonly())
1985 self.failUnlessReallyEqual(child.get_write_uri(), None)
1986 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1987 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1988 d.addCallback(_check)
# Look the CHK cap up in the fake in-memory share store and compare.
1991 def failUnlessCHKURIHasContents(self, got_uri, contents):
1992 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
# Basic t=upload into a directory: the new child exists, its URI matches
# the upload response, and its contents round-trip.
# NOTE(review): the "fn = self._foo_node" binding line is missing here.
1994 def test_POST_upload(self):
1995 d = self.POST(self.public_url + "/foo", t="upload",
1996 file=("new.txt", self.NEWFILE_CONTENTS))
1998 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1999 d.addCallback(lambda res:
2000 self.failUnlessChildContentsAre(fn, u"new.txt",
2001 self.NEWFILE_CONTENTS))
# t=upload with a non-ASCII filename: child name is stored as unicode
# and is retrievable via its UTF-8-encoded URL.
2004 def test_POST_upload_unicode(self):
2005 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
2006 d = self.POST(self.public_url + "/foo", t="upload",
2007 file=(filename, self.NEWFILE_CONTENTS))
2009 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
2010 d.addCallback(lambda res:
2011 self.failUnlessChildContentsAre(fn, filename,
2012 self.NEWFILE_CONTENTS))
2013 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
2014 d.addCallback(lambda res: self.GET(target_url))
2015 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
2016 self.NEWFILE_CONTENTS,
# Like test_POST_upload_unicode, but the unicode name comes from the
# name= form field, which overrides the uploaded file's own filename.
2020 def test_POST_upload_unicode_named(self):
2021 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
2022 d = self.POST(self.public_url + "/foo", t="upload",
2024 file=("overridden", self.NEWFILE_CONTENTS))
2026 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
2027 d.addCallback(lambda res:
2028 self.failUnlessChildContentsAre(fn, filename,
2029 self.NEWFILE_CONTENTS))
2030 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
2031 d.addCallback(lambda res: self.GET(target_url))
2032 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
2033 self.NEWFILE_CONTENTS,
# Unlinked upload (POST /uri t=upload): response is an HTML results page
# from which we scrape the new URI, then verify its contents.
2037 def test_POST_upload_no_link(self):
2038 d = self.POST("/uri", t="upload",
2039 file=("new.txt", self.NEWFILE_CONTENTS))
2040 def _check_upload_results(page):
2041 # this should be a page which describes the results of the upload
2042 # that just finished.
2043 self.failUnless("Upload Results:" in page)
2044 self.failUnless("URI:" in page)
2045 uri_re = re.compile("URI: <tt><span>(.*)</span>")
2046 mo = uri_re.search(page)
2047 self.failUnless(mo, page)
2048 new_uri = mo.group(1)
2050 d.addCallback(_check_upload_results)
2051 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done= on an unlinked upload should redirect to the given URL.
2054 def test_POST_upload_no_link_whendone(self):
2055 d = self.POST("/uri", t="upload", when_done="/",
2056 file=("new.txt", self.NEWFILE_CONTENTS))
2057 d.addBoth(self.shouldRedirect, "/")
# Run 'callable'; it must fail with PageRedirect, whose status code and
# Location are then handed to 'checker'. A plain return means the
# redirect did not happen and the test fails.
# NOTE(review): the inner "def _done(res):" line and the tail of the
# self.fail(...) call are missing from this dump.
2060 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
2061 d = defer.maybeDeferred(callable, *args, **kwargs)
2063 if isinstance(res, failure.Failure):
2064 res.trap(error.PageRedirect)
2065 statuscode = res.value.status
2066 target = res.value.location
2067 return checker(statuscode, target)
2068 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done="/uri/%(uri)s" interpolates the new filecap into the
# redirect target; fetching that target must return the uploaded bytes.
2073 def test_POST_upload_no_link_whendone_results(self):
2074 def check(statuscode, target):
2075 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2076 self.failUnless(target.startswith(self.webish_url), target)
2077 return client.getPage(target, method="GET")
2078 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
2080 self.POST, "/uri", t="upload",
2081 when_done="/uri/%(uri)s",
2082 file=("new.txt", self.NEWFILE_CONTENTS))
2083 d.addCallback(lambda res:
2084 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))
# Unlinked mutable upload: response body is a URI:SSK: writecap; the
# file must then be retrievable via the node, /uri/<cap>, and
# /file/<cap>. NOTE(review): the "def _check2/3/4(data):" header lines
# are missing from this dump, so the callbacks appear unbound here.
2087 def test_POST_upload_no_link_mutable(self):
2088 d = self.POST("/uri", t="upload", mutable="true",
2089 file=("new.txt", self.NEWFILE_CONTENTS))
2090 def _check(filecap):
2091 filecap = filecap.strip()
2092 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2093 self.filecap = filecap
2094 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2095 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
2096 n = self.s.create_node_from_uri(filecap)
2097 return n.download_best_version()
2098 d.addCallback(_check)
2100 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
2101 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2102 d.addCallback(_check2)
2104 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
2105 return self.GET("/file/%s" % urllib.quote(self.filecap))
2106 d.addCallback(_check3)
2108 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
2109 d.addCallback(_check4)
# The old SDMF size limit is gone: uploading a mutable file larger than
# the historical MUTABLE_SIZELIMIT must simply succeed.
2112 def test_POST_upload_no_link_mutable_toobig(self):
2113 # The SDMF size limit is no longer in place, so we should be
2114 # able to upload mutable files that are as large as we want them
2116 d = self.POST("/uri", t="upload", mutable="true",
2117 file=("new.txt", "b" * (self.s.MUTABLE_SIZELIMIT + 1)))
# Unlinked upload with explicit format=: each of chk/CHK/sdmf/mdmf must
# produce a cap with the right URI prefix and the right "format" field
# in the t=json metadata. Format matching is case-insensitive (CHK).
2121 def test_POST_upload_format_unlinked(self):
2122 def _check_upload_unlinked(ign, format, uri_prefix):
2123 filename = format + ".txt"
2124 d = self.POST("/uri?t=upload&format=" + format,
2125 file=(filename, self.NEWFILE_CONTENTS * 300000))
2126 def _got_filecap(filecap):
2127 self.failUnless(filecap.startswith(uri_prefix))
2128 return self.GET("/uri/%s?t=json" % filecap)
2129 d.addCallback(_got_filecap)
2130 def _got_json(json):
2131 data = simplejson.loads(json)
2133 self.failUnlessIn("format", data)
2134 self.failUnlessEqual(data["format"], format)
2135 d.addCallback(_got_json)
2137 d = defer.succeed(None)
2138 d.addCallback(_check_upload_unlinked, "chk", "URI:CHK")
2139 d.addCallback(_check_upload_unlinked, "CHK", "URI:CHK")
2140 d.addCallback(_check_upload_unlinked, "sdmf", "URI:SSK")
2141 d.addCallback(_check_upload_unlinked, "mdmf", "URI:MDMF")
# format=foo on an unlinked upload must 400 with "Unknown format: foo".
# NOTE(review): the "self.POST," argument line is missing here.
2144 def test_POST_upload_bad_format_unlinked(self):
2145 return self.shouldHTTPError("POST_upload_bad_format_unlinked",
2146 400, "Bad Request", "Unknown format: foo",
2148 "/uri?t=upload&format=foo",
2149 file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
# Linked upload into /foo with explicit format=: verify the URI prefix,
# the dirnode link (when fn is given), and the t=json "format" field.
2151 def test_POST_upload_format(self):
2152 def _check_upload(ign, format, uri_prefix, fn=None):
2153 filename = format + ".txt"
2154 d = self.POST(self.public_url +
2155 "/foo?t=upload&format=" + format,
2156 file=(filename, self.NEWFILE_CONTENTS * 300000))
2157 def _got_filecap(filecap):
2159 filenameu = unicode(filename)
2160 self.failUnlessURIMatchesRWChild(filecap, fn, filenameu)
2161 self.failUnless(filecap.startswith(uri_prefix))
2162 return self.GET(self.public_url + "/foo/%s?t=json" % filename)
2163 d.addCallback(_got_filecap)
2164 def _got_json(json):
2165 data = simplejson.loads(json)
2167 self.failUnlessIn("format", data)
2168 self.failUnlessEqual(data["format"], format)
2169 d.addCallback(_got_json)
2171 d = defer.succeed(None)
2172 d.addCallback(_check_upload, "chk", "URI:CHK")
2173 d.addCallback(_check_upload, "sdmf", "URI:SSK", self._foo_node)
2174 d.addCallback(_check_upload, "mdmf", "URI:MDMF")
2175 d.addCallback(_check_upload, "MDMF", "URI:MDMF")
# format=foo on a linked upload must 400 with "Unknown format: foo".
2178 def test_POST_upload_bad_format(self):
2179 return self.shouldHTTPError("POST_upload_bad_format",
2180 400, "Bad Request", "Unknown format: foo",
2181 self.POST, self.public_url + \
2182 "/foo?t=upload&format=foo",
2183 file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
# End-to-end exercise of mutable-file upload via the web-API:
#  1. POST t=upload mutable=true creates the file; record its writecap.
#  2. Re-POST and PUT the same name: contents change, URI stays stable.
#  3. Check the HTML listing, directory t=json, file t=json, t=uri,
#     t=readonly-uri, /uri/<cap> retrieval, and HEAD content headers.
#  4. Confirm the obsolete SDMF size limit is no longer enforced.
# NOTE(review): this dump is missing several interior lines (e.g. the
# "def _got(newnode):" headers, "fn = self._foo_node", and "return d"),
# so some callbacks appear unbound below.
2185 def test_POST_upload_mutable(self):
2186 # this creates a mutable file
2187 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
2188 file=("new.txt", self.NEWFILE_CONTENTS))
2190 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
2191 d.addCallback(lambda res:
2192 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
2193 self.NEWFILE_CONTENTS))
2194 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
2196 self.failUnless(IMutableFileNode.providedBy(newnode))
2197 self.failUnless(newnode.is_mutable())
2198 self.failIf(newnode.is_readonly())
2199 self._mutable_node = newnode
2200 self._mutable_uri = newnode.get_uri()
2203 # now upload it again and make sure that the URI doesn't change
2204 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
2205 d.addCallback(lambda res:
2206 self.POST(self.public_url + "/foo", t="upload",
2208 file=("new.txt", NEWER_CONTENTS)))
2209 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
2210 d.addCallback(lambda res:
2211 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
2213 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
2215 self.failUnless(IMutableFileNode.providedBy(newnode))
2216 self.failUnless(newnode.is_mutable())
2217 self.failIf(newnode.is_readonly())
2218 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
2219 d.addCallback(_got2)
2221 # upload a second time, using PUT instead of POST
2222 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
2223 d.addCallback(lambda res:
2224 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
2225 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
2226 d.addCallback(lambda res:
2227 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
2230 # finally list the directory, since mutable files are displayed
2231 # slightly differently
2233 d.addCallback(lambda res:
2234 self.GET(self.public_url + "/foo/",
2235 followRedirect=True))
2236 def _check_page(res):
2237 # TODO: assert more about the contents
2238 self.failUnless("SSK" in res)
2240 d.addCallback(_check_page)
2242 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
2244 self.failUnless(IMutableFileNode.providedBy(newnode))
2245 self.failUnless(newnode.is_mutable())
2246 self.failIf(newnode.is_readonly())
2247 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
2248 d.addCallback(_got3)
2250 # look at the JSON form of the enclosing directory
2251 d.addCallback(lambda res:
2252 self.GET(self.public_url + "/foo/?t=json",
2253 followRedirect=True))
2254 def _check_page_json(res):
2255 parsed = simplejson.loads(res)
2256 self.failUnlessEqual(parsed[0], "dirnode")
2257 children = dict( [(unicode(name),value)
2259 in parsed[1]["children"].iteritems()] )
2260 self.failUnless(u"new.txt" in children)
2261 new_json = children[u"new.txt"]
2262 self.failUnlessEqual(new_json[0], "filenode")
2263 self.failUnless(new_json[1]["mutable"])
2264 self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
2265 ro_uri = self._mutable_node.get_readonly().to_string()
2266 self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
2267 d.addCallback(_check_page_json)
2269 # and the JSON form of the file
2270 d.addCallback(lambda res:
2271 self.GET(self.public_url + "/foo/new.txt?t=json"))
2272 def _check_file_json(res):
2273 parsed = simplejson.loads(res)
2274 self.failUnlessEqual(parsed[0], "filenode")
2275 self.failUnless(parsed[1]["mutable"])
2276 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
2277 ro_uri = self._mutable_node.get_readonly().to_string()
2278 self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
2279 d.addCallback(_check_file_json)
2281 # and look at t=uri and t=readonly-uri
2282 d.addCallback(lambda res:
2283 self.GET(self.public_url + "/foo/new.txt?t=uri"))
2284 d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
2285 d.addCallback(lambda res:
2286 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
2287 def _check_ro_uri(res):
2288 ro_uri = self._mutable_node.get_readonly().to_string()
2289 self.failUnlessReallyEqual(res, ro_uri)
2290 d.addCallback(_check_ro_uri)
2292 # make sure we can get to it from /uri/URI
2293 d.addCallback(lambda res:
2294 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
2295 d.addCallback(lambda res:
2296 self.failUnlessReallyEqual(res, NEW2_CONTENTS))
2298 # and that HEAD computes the size correctly
2299 d.addCallback(lambda res:
2300 self.HEAD(self.public_url + "/foo/new.txt",
2301 return_response=True))
2302 def _got_headers((res, status, headers)):
2303 self.failUnlessReallyEqual(res, "")
2304 self.failUnlessReallyEqual(headers["content-length"][0],
2305 str(len(NEW2_CONTENTS)))
2306 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
2307 d.addCallback(_got_headers)
2309 # make sure that outdated size limits aren't enforced anymore.
2310 d.addCallback(lambda ignored:
2311 self.POST(self.public_url + "/foo", t="upload",
2314 "b" * (self.s.MUTABLE_SIZELIMIT+1))))
2315 d.addErrback(self.dump_error)
# Linked variant of the "no size limit" check: a mutable upload larger
# than the historical SDMF limit must succeed without error.
2318 def test_POST_upload_mutable_toobig(self):
2319 # SDMF had a size limti that was removed a while ago. MDMF has
2320 # never had a size limit. Test to make sure that we do not
2321 # encounter errors when trying to upload large mutable files,
2322 # since there should be no coded prohibitions regarding large
2324 d = self.POST(self.public_url + "/foo",
2325 t="upload", mutable="true",
2326 file=("new.txt", "b" * (self.s.MUTABLE_SIZELIMIT + 1)))
# Errback helper: print the HTTP response body hidden inside a
# twisted.web error.Error so trial's failure output is actually useful.
# Re-raises (presumably via "return f" — that line is missing from this
# dump) so the failure still propagates.
2329 def dump_error(self, f):
2330 # if the web server returns an error code (like 400 Bad Request),
2331 # web.client.getPage puts the HTTP response body into the .response
2332 # attribute of the exception object that it gives back. It does not
2333 # appear in the Failure's repr(), so the ERROR that trial displays
2334 # will be rather terse and unhelpful. addErrback this method to the
2335 # end of your chain to get more information out of these errors.
2336 if f.check(error.Error):
2337 print "web.error.Error:"
2339 print f.value.response
# Uploading over an existing name (bar.txt) replaces the child by
# default and the new contents win.
# NOTE(review): the "fn = self._foo_node" binding line is missing here.
2342 def test_POST_upload_replace(self):
2343 d = self.POST(self.public_url + "/foo", t="upload",
2344 file=("bar.txt", self.NEWFILE_CONTENTS))
2346 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
2347 d.addCallback(lambda res:
2348 self.failUnlessChildContentsAre(fn, u"bar.txt",
2349 self.NEWFILE_CONTENTS))
# replace=false is fine when the name does not already exist.
2352 def test_POST_upload_no_replace_ok(self):
2353 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
2354 file=("new.txt", self.NEWFILE_CONTENTS))
2355 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
2356 d.addCallback(lambda res: self.failUnlessReallyEqual(res,
2357 self.NEWFILE_CONTENTS))
# replace=false as a query arg must refuse to overwrite an existing
# child, and the original bar.txt must survive.
2360 def test_POST_upload_no_replace_queryarg(self):
2361 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
2362 file=("bar.txt", self.NEWFILE_CONTENTS))
2363 d.addBoth(self.shouldFail, error.Error,
2364 "POST_upload_no_replace_queryarg",
2366 "There was already a child by that name, and you asked me "
2367 "to not replace it")
2368 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2369 d.addCallback(self.failUnlessIsBarDotTxt)
# Same refusal as above, but replace=false passed as a form field.
2372 def test_POST_upload_no_replace_field(self):
2373 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
2374 file=("bar.txt", self.NEWFILE_CONTENTS))
2375 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
2377 "There was already a child by that name, and you asked me "
2378 "to not replace it")
2379 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2380 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done= on a linked upload redirects, and the upload still lands.
# NOTE(review): the "fn = self._foo_node" binding line is missing here.
2383 def test_POST_upload_whendone(self):
2384 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
2385 file=("new.txt", self.NEWFILE_CONTENTS))
2386 d.addBoth(self.shouldRedirect, "/THERE")
2388 d.addCallback(lambda res:
2389 self.failUnlessChildContentsAre(fn, u"new.txt",
2390 self.NEWFILE_CONTENTS))
# t=upload with an explicit name= field and a bare file body (no
# filename in the file part): the child is stored under name=.
2393 def test_POST_upload_named(self):
2395 d = self.POST(self.public_url + "/foo", t="upload",
2396 name="new.txt", file=self.NEWFILE_CONTENTS)
2397 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
2398 d.addCallback(lambda res:
2399 self.failUnlessChildContentsAre(fn, u"new.txt",
2400 self.NEWFILE_CONTENTS))
# name= containing a slash must be rejected, and the directory's child
# list must be unchanged (no partial add).
2403 def test_POST_upload_named_badfilename(self):
2404 d = self.POST(self.public_url + "/foo", t="upload",
2405 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
2406 d.addBoth(self.shouldFail, error.Error,
2407 "test_POST_upload_named_badfilename",
2409 "name= may not contain a slash",
2411 # make sure that nothing was added
2412 d.addCallback(lambda res:
2413 self.failUnlessNodeKeysAre(self._foo_node,
2414 [u"bar.txt", u"baz.txt", u"blockingfile",
2415 u"empty", u"n\u00fc.txt", u"quux.txt",
# t=check on a file: HTML says "Healthy :", when_done= redirects,
# return_to= yields a "Return to file" link, and output=JSON carries a
# storage-index plus results.healthy. NOTE(review): the "def _check(res):"
# and "def _check3(res):" header lines are missing from this dump.
2419 def test_POST_FILEURL_check(self):
2420 bar_url = self.public_url + "/foo/bar.txt"
2421 d = self.POST(bar_url, t="check")
2423 self.failUnless("Healthy :" in res)
2424 d.addCallback(_check)
2425 redir_url = "http://allmydata.org/TARGET"
2426 def _check2(statuscode, target):
2427 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2428 self.failUnlessReallyEqual(target, redir_url)
2429 d.addCallback(lambda res:
2430 self.shouldRedirect2("test_POST_FILEURL_check",
2434 when_done=redir_url))
2435 d.addCallback(lambda res:
2436 self.POST(bar_url, t="check", return_to=redir_url))
2438 self.failUnless("Healthy :" in res)
2439 self.failUnless("Return to file" in res)
2440 self.failUnless(redir_url in res)
2441 d.addCallback(_check3)
2443 d.addCallback(lambda res:
2444 self.POST(bar_url, t="check", output="JSON"))
2445 def _check_json(res):
2446 data = simplejson.loads(res)
2447 self.failUnless("storage-index" in data)
2448 self.failUnless(data["results"]["healthy"])
2449 d.addCallback(_check_json)
# t=check&repair=true on a file: same Healthy/redirect/return_to checks
# as test_POST_FILEURL_check, with repair requested.
# NOTE(review): the "def _check(res):" / "def _check3(res):" header
# lines are missing from this dump.
2453 def test_POST_FILEURL_check_and_repair(self):
2454 bar_url = self.public_url + "/foo/bar.txt"
2455 d = self.POST(bar_url, t="check", repair="true")
2457 self.failUnless("Healthy :" in res)
2458 d.addCallback(_check)
2459 redir_url = "http://allmydata.org/TARGET"
2460 def _check2(statuscode, target):
2461 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2462 self.failUnlessReallyEqual(target, redir_url)
2463 d.addCallback(lambda res:
2464 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
2467 t="check", repair="true",
2468 when_done=redir_url))
2469 d.addCallback(lambda res:
2470 self.POST(bar_url, t="check", return_to=redir_url))
2472 self.failUnless("Healthy :" in res)
2473 self.failUnless("Return to file" in res)
2474 self.failUnless(redir_url in res)
2475 d.addCallback(_check3)
# t=check on a directory: mirrors the file version but the return_to
# link reads "Return to file/directory". JSON output checked too.
# NOTE(review): "def _check(res):" / "def _check3(res):" header lines
# are missing from this dump.
2478 def test_POST_DIRURL_check(self):
2479 foo_url = self.public_url + "/foo/"
2480 d = self.POST(foo_url, t="check")
2482 self.failUnless("Healthy :" in res, res)
2483 d.addCallback(_check)
2484 redir_url = "http://allmydata.org/TARGET"
2485 def _check2(statuscode, target):
2486 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2487 self.failUnlessReallyEqual(target, redir_url)
2488 d.addCallback(lambda res:
2489 self.shouldRedirect2("test_POST_DIRURL_check",
2493 when_done=redir_url))
2494 d.addCallback(lambda res:
2495 self.POST(foo_url, t="check", return_to=redir_url))
2497 self.failUnless("Healthy :" in res, res)
2498 self.failUnless("Return to file/directory" in res)
2499 self.failUnless(redir_url in res)
2500 d.addCallback(_check3)
2502 d.addCallback(lambda res:
2503 self.POST(foo_url, t="check", output="JSON"))
2504 def _check_json(res):
2505 data = simplejson.loads(res)
2506 self.failUnless("storage-index" in data)
2507 self.failUnless(data["results"]["healthy"])
2508 d.addCallback(_check_json)
# t=check&repair=true on a directory, with when_done/return_to checks.
# NOTE(review): "def _check(res):" / "def _check3(res):" header lines
# are missing from this dump.
2512 def test_POST_DIRURL_check_and_repair(self):
2513 foo_url = self.public_url + "/foo/"
2514 d = self.POST(foo_url, t="check", repair="true")
2516 self.failUnless("Healthy :" in res, res)
2517 d.addCallback(_check)
2518 redir_url = "http://allmydata.org/TARGET"
2519 def _check2(statuscode, target):
2520 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2521 self.failUnlessReallyEqual(target, redir_url)
2522 d.addCallback(lambda res:
2523 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
2526 t="check", repair="true",
2527 when_done=redir_url))
2528 d.addCallback(lambda res:
2529 self.POST(foo_url, t="check", return_to=redir_url))
2531 self.failUnless("Healthy :" in res)
2532 self.failUnless("Return to file/directory" in res)
2533 self.failUnless(redir_url in res)
2534 d.addCallback(_check3)
# t=check on an MDMF file cap, both with and without the ":3:131073"
# extension-parameter suffix on the cap.
# NOTE(review): the "def _check(res):" header line is missing here.
2537 def test_POST_FILEURL_mdmf_check(self):
2538 quux_url = "/uri/%s" % urllib.quote(self._quux_txt_uri)
2539 d = self.POST(quux_url, t="check")
2541 self.failUnlessIn("Healthy", res)
2542 d.addCallback(_check)
2543 quux_extension_url = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
2544 d.addCallback(lambda ignored:
2545 self.POST(quux_extension_url, t="check"))
2546 d.addCallback(_check)
# t=check&repair=true on an MDMF cap, plain and with the extension
# suffix. NOTE(review): the "def _check(res):" header line is missing.
2549 def test_POST_FILEURL_mdmf_check_and_repair(self):
2550 quux_url = "/uri/%s" % urllib.quote(self._quux_txt_uri)
2551 d = self.POST(quux_url, t="check", repair="true")
2553 self.failUnlessIn("Healthy", res)
2554 d.addCallback(_check)
2555 quux_extension_url = "/uri/%s" %\
2556 urllib.quote("%s:3:131073" % self._quux_txt_uri)
2557 d.addCallback(lambda ignored:
2558 self.POST(quux_extension_url, t="check", repair="true"))
2559 d.addCallback(_check)
# Poll /operations/<ophandle>?t=status&output=JSON once a second until
# the JSON reports finished=True, then (presumably — the tail of this
# method is missing from the dump) return the parsed status data.
# NOTE(review): the GET call and the callback header are also missing.
2562 def wait_for_operation(self, ignored, ophandle):
2563 url = "/operations/" + ophandle
2564 url += "?t=status&output=JSON"
2567 data = simplejson.loads(res)
2568 if not data["finished"]:
2569 d = self.stall(delay=1.0)
2570 d.addCallback(self.wait_for_operation, ophandle)
# Fetch /operations/<ophandle> results; when output is "json" the body
# is parsed, otherwise (presumably) returned raw — the GET call, the
# "?t=status" suffix, and the else branch are missing from this dump.
2576 def get_operation_results(self, ignored, ophandle, output=None):
2577 url = "/operations/" + ophandle
2580 url += "&output=" + output
2583 if output and output.lower() == "json":
2584 return simplejson.loads(res)
# start-deep-check without ophandle= must be refused ("slow operation
# requires ophandle=").
2589 def test_POST_DIRURL_deepcheck_no_ophandle(self):
2590 d = self.shouldFail2(error.Error,
2591 "test_POST_DIRURL_deepcheck_no_ophandle",
2593 "slow operation requires ophandle=",
2594 self.POST, self.public_url, t="start-deep-check")
# Full deep-check flow: start-deep-check redirects to /operations/123,
# poll to completion, verify JSON counts (10 objects, all healthy) and
# the HTML rendering (with and without trailing slash), 404 for a bogus
# per-SI page, and the per-storage-index JSON detail page.
2597 def test_POST_DIRURL_deepcheck(self):
2598 def _check_redirect(statuscode, target):
2599 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2600 self.failUnless(target.endswith("/operations/123"))
2601 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
2602 self.POST, self.public_url,
2603 t="start-deep-check", ophandle="123")
2604 d.addCallback(self.wait_for_operation, "123")
2605 def _check_json(data):
2606 self.failUnlessReallyEqual(data["finished"], True)
2607 self.failUnlessReallyEqual(data["count-objects-checked"], 10)
2608 self.failUnlessReallyEqual(data["count-objects-healthy"], 10)
2609 d.addCallback(_check_json)
2610 d.addCallback(self.get_operation_results, "123", "html")
2611 def _check_html(res):
2612 self.failUnless("Objects Checked: <span>10</span>" in res)
2613 self.failUnless("Objects Healthy: <span>10</span>" in res)
2614 d.addCallback(_check_html)
2616 d.addCallback(lambda res:
2617 self.GET("/operations/123/"))
2618 d.addCallback(_check_html) # should be the same as without the slash
2620 d.addCallback(lambda res:
2621 self.shouldFail2(error.Error, "one", "404 Not Found",
2622 "No detailed results for SI bogus",
2623 self.GET, "/operations/123/bogus"))
2625 foo_si = self._foo_node.get_storage_index()
2626 foo_si_s = base32.b2a(foo_si)
2627 d.addCallback(lambda res:
2628 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2629 def _check_foo_json(res):
2630 data = simplejson.loads(res)
2631 self.failUnlessEqual(data["storage-index"], foo_si_s)
2632 self.failUnless(data["results"]["healthy"])
2633 d.addCallback(_check_foo_json)
# Deep-check with repair=true on an already-healthy tree: all 10
# objects healthy before and after, zero repairs attempted, and the
# HTML report shows the same pre/post-repair counters.
2636 def test_POST_DIRURL_deepcheck_and_repair(self):
2637 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2638 ophandle="124", output="json", followRedirect=True)
2639 d.addCallback(self.wait_for_operation, "124")
2640 def _check_json(data):
2641 self.failUnlessReallyEqual(data["finished"], True)
2642 self.failUnlessReallyEqual(data["count-objects-checked"], 10)
2643 self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 10)
2644 self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
2645 self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
2646 self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
2647 self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
2648 self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
2649 self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 10)
2650 self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
2651 self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
2652 d.addCallback(_check_json)
2653 d.addCallback(self.get_operation_results, "124", "html")
2654 def _check_html(res):
2655 self.failUnless("Objects Checked: <span>10</span>" in res)
2657 self.failUnless("Objects Healthy (before repair): <span>10</span>" in res)
2658 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
2659 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
2661 self.failUnless("Repairs Attempted: <span>0</span>" in res)
2662 self.failUnless("Repairs Successful: <span>0</span>" in res)
2663 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
2665 self.failUnless("Objects Healthy (after repair): <span>10</span>" in res)
2666 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
2667 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
2668 d.addCallback(_check_html)
# An unrecognized t= value on a file POST must 400 with "bad t=bogus".
# NOTE(review): the final t="bogus" argument line is missing here.
2671 def test_POST_FILEURL_bad_t(self):
2672 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2673 "POST to file: bad t=bogus",
2674 self.POST, self.public_url + "/foo/bar.txt",
# POST t=mkdir with name= creates an empty child directory.
2678 def test_POST_mkdir(self): # return value?
2679 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2680 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2681 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir with format=mdmf yields a directory whose backing mutable
# node reports MDMF_VERSION.
2684 def test_POST_mkdir_mdmf(self):
2685 d = self.POST(self.public_url + "/foo?t=mkdir&name=newdir&format=mdmf")
2686 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2687 d.addCallback(lambda node:
2688 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
# t=mkdir with format=sdmf yields a directory whose backing mutable
# node reports SDMF_VERSION.
2691 def test_POST_mkdir_sdmf(self):
2692 d = self.POST(self.public_url + "/foo?t=mkdir&name=newdir&format=sdmf")
2693 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2694 d.addCallback(lambda node:
2695 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
2698 def test_POST_mkdir_bad_format(self):
2699 return self.shouldHTTPError("POST_mkdir_bad_format",
2700 400, "Bad Request", "Unknown format: foo",
2701 self.POST, self.public_url +
2702 "/foo?t=mkdir&name=newdir&format=foo")
2704 def test_POST_mkdir_initial_children(self):
2705 (newkids, caps) = self._create_initial_children()
2706 d = self.POST2(self.public_url +
2707 "/foo?t=mkdir-with-children&name=newdir",
2708 simplejson.dumps(newkids))
2709 d.addCallback(lambda res:
2710 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2711 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2712 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2713 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2714 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2717 def test_POST_mkdir_initial_children_mdmf(self):
2718 (newkids, caps) = self._create_initial_children()
2719 d = self.POST2(self.public_url +
2720 "/foo?t=mkdir-with-children&name=newdir&format=mdmf",
2721 simplejson.dumps(newkids))
2722 d.addCallback(lambda res:
2723 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2724 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2725 d.addCallback(lambda node:
2726 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
2727 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2728 d.addCallback(self.failUnlessROChildURIIs, u"child-imm",
2733 def test_POST_mkdir_initial_children_sdmf(self):
2734 (newkids, caps) = self._create_initial_children()
2735 d = self.POST2(self.public_url +
2736 "/foo?t=mkdir-with-children&name=newdir&format=sdmf",
2737 simplejson.dumps(newkids))
2738 d.addCallback(lambda res:
2739 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2740 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2741 d.addCallback(lambda node:
2742 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
2743 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2744 d.addCallback(self.failUnlessROChildURIIs, u"child-imm",
2748 def test_POST_mkdir_initial_children_bad_format(self):
2749 (newkids, caps) = self._create_initial_children()
2750 return self.shouldHTTPError("POST_mkdir_initial_children_bad_format",
2751 400, "Bad Request", "Unknown format: foo",
2752 self.POST, self.public_url + \
2753 "/foo?t=mkdir-with-children&name=newdir&format=foo",
2754 simplejson.dumps(newkids))
2756 def test_POST_mkdir_immutable(self):
2757 (newkids, caps) = self._create_immutable_children()
2758 d = self.POST2(self.public_url +
2759 "/foo?t=mkdir-immutable&name=newdir",
2760 simplejson.dumps(newkids))
2761 d.addCallback(lambda res:
2762 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2763 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2764 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2765 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2766 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2767 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2768 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2769 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2770 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2771 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2772 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2773 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2774 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
2777 def test_POST_mkdir_immutable_bad(self):
2778 (newkids, caps) = self._create_initial_children()
2779 d = self.shouldFail2(error.Error, "POST_mkdir_immutable_bad",
2781 "needed to be immutable but was not",
2784 "/foo?t=mkdir-immutable&name=newdir",
2785 simplejson.dumps(newkids))
2788 def test_POST_mkdir_2(self):
2789 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2790 d.addCallback(lambda res:
2791 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2792 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2793 d.addCallback(self.failUnlessNodeKeysAre, [])
2796 def test_POST_mkdirs_2(self):
2797 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2798 d.addCallback(lambda res:
2799 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2800 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2801 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2802 d.addCallback(self.failUnlessNodeKeysAre, [])
2805 def test_POST_mkdir_no_parentdir_noredirect(self):
2806 d = self.POST("/uri?t=mkdir")
2807 def _after_mkdir(res):
2808 uri.DirectoryURI.init_from_string(res)
2809 d.addCallback(_after_mkdir)
2812 def test_POST_mkdir_no_parentdir_noredirect_mdmf(self):
2813 d = self.POST("/uri?t=mkdir&format=mdmf")
2814 def _after_mkdir(res):
2815 u = uri.from_string(res)
2816 # Check that this is an MDMF writecap
2817 self.failUnlessIsInstance(u, uri.MDMFDirectoryURI)
2818 d.addCallback(_after_mkdir)
2821 def test_POST_mkdir_no_parentdir_noredirect_sdmf(self):
2822 d = self.POST("/uri?t=mkdir&format=sdmf")
2823 def _after_mkdir(res):
2824 u = uri.from_string(res)
2825 self.failUnlessIsInstance(u, uri.DirectoryURI)
2826 d.addCallback(_after_mkdir)
2829 def test_POST_mkdir_no_parentdir_noredirect_bad_format(self):
2830 return self.shouldHTTPError("POST_mkdir_no_parentdir_noredirect_bad_format",
2831 400, "Bad Request", "Unknown format: foo",
2832 self.POST, self.public_url +
2833 "/uri?t=mkdir&format=foo")
2835 def test_POST_mkdir_no_parentdir_noredirect2(self):
2836 # make sure form-based arguments (as on the welcome page) still work
2837 d = self.POST("/uri", t="mkdir")
2838 def _after_mkdir(res):
2839 uri.DirectoryURI.init_from_string(res)
2840 d.addCallback(_after_mkdir)
2841 d.addErrback(self.explain_web_error)
2844 def test_POST_mkdir_no_parentdir_redirect(self):
2845 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2846 d.addBoth(self.shouldRedirect, None, statuscode='303')
2847 def _check_target(target):
2848 target = urllib.unquote(target)
2849 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2850 d.addCallback(_check_target)
2853 def test_POST_mkdir_no_parentdir_redirect2(self):
2854 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2855 d.addBoth(self.shouldRedirect, None, statuscode='303')
2856 def _check_target(target):
2857 target = urllib.unquote(target)
2858 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2859 d.addCallback(_check_target)
2860 d.addErrback(self.explain_web_error)
2863 def _make_readonly(self, u):
2864 ro_uri = uri.from_string(u).get_readonly()
2867 return ro_uri.to_string()
2869 def _create_initial_children(self):
2870 contents, n, filecap1 = self.makefile(12)
2871 md1 = {"metakey1": "metavalue1"}
2872 filecap2 = make_mutable_file_uri()
2873 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2874 filecap3 = node3.get_readonly_uri()
2875 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2876 dircap = DirectoryNode(node4, None, None).get_uri()
2877 mdmfcap = make_mutable_file_uri(mdmf=True)
2878 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2879 emptydircap = "URI:DIR2-LIT:"
2880 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2881 "ro_uri": self._make_readonly(filecap1),
2882 "metadata": md1, }],
2883 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2884 "ro_uri": self._make_readonly(filecap2)}],
2885 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2886 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2887 "ro_uri": unknown_rocap}],
2888 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2889 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2890 u"dirchild": ["dirnode", {"rw_uri": dircap,
2891 "ro_uri": self._make_readonly(dircap)}],
2892 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2893 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2894 u"child-mutable-mdmf": ["filenode", {"rw_uri": mdmfcap,
2895 "ro_uri": self._make_readonly(mdmfcap)}],
2897 return newkids, {'filecap1': filecap1,
2898 'filecap2': filecap2,
2899 'filecap3': filecap3,
2900 'unknown_rwcap': unknown_rwcap,
2901 'unknown_rocap': unknown_rocap,
2902 'unknown_immcap': unknown_immcap,
2904 'litdircap': litdircap,
2905 'emptydircap': emptydircap,
2908 def _create_immutable_children(self):
2909 contents, n, filecap1 = self.makefile(12)
2910 md1 = {"metakey1": "metavalue1"}
2911 tnode = create_chk_filenode("immutable directory contents\n"*10)
2912 dnode = DirectoryNode(tnode, None, None)
2913 assert not dnode.is_mutable()
2914 immdircap = dnode.get_uri()
2915 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2916 emptydircap = "URI:DIR2-LIT:"
2917 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2918 "metadata": md1, }],
2919 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2920 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2921 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2922 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2924 return newkids, {'filecap1': filecap1,
2925 'unknown_immcap': unknown_immcap,
2926 'immdircap': immdircap,
2927 'litdircap': litdircap,
2928 'emptydircap': emptydircap}
2930 def test_POST_mkdir_no_parentdir_initial_children(self):
2931 (newkids, caps) = self._create_initial_children()
2932 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2933 def _after_mkdir(res):
2934 self.failUnless(res.startswith("URI:DIR"), res)
2935 n = self.s.create_node_from_uri(res)
2936 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2937 d2.addCallback(lambda ign:
2938 self.failUnlessROChildURIIs(n, u"child-imm",
2940 d2.addCallback(lambda ign:
2941 self.failUnlessRWChildURIIs(n, u"child-mutable",
2943 d2.addCallback(lambda ign:
2944 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2946 d2.addCallback(lambda ign:
2947 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2948 caps['unknown_rwcap']))
2949 d2.addCallback(lambda ign:
2950 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2951 caps['unknown_rocap']))
2952 d2.addCallback(lambda ign:
2953 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2954 caps['unknown_immcap']))
2955 d2.addCallback(lambda ign:
2956 self.failUnlessRWChildURIIs(n, u"dirchild",
2959 d.addCallback(_after_mkdir)
2962 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2963 # the regular /uri?t=mkdir operation is specified to ignore its body.
2964 # Only t=mkdir-with-children pays attention to it.
2965 (newkids, caps) = self._create_initial_children()
2966 d = self.shouldHTTPError("POST_mkdir_no_parentdir_unexpected_children",
2968 "t=mkdir does not accept children=, "
2969 "try t=mkdir-with-children instead",
2970 self.POST2, "/uri?t=mkdir", # without children
2971 simplejson.dumps(newkids))
2974 def test_POST_noparent_bad(self):
2975 d = self.shouldHTTPError("POST_noparent_bad",
2977 "/uri accepts only PUT, PUT?t=mkdir, "
2978 "POST?t=upload, and POST?t=mkdir",
2979 self.POST, "/uri?t=bogus")
2982 def test_POST_mkdir_no_parentdir_immutable(self):
2983 (newkids, caps) = self._create_immutable_children()
2984 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2985 def _after_mkdir(res):
2986 self.failUnless(res.startswith("URI:DIR"), res)
2987 n = self.s.create_node_from_uri(res)
2988 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2989 d2.addCallback(lambda ign:
2990 self.failUnlessROChildURIIs(n, u"child-imm",
2992 d2.addCallback(lambda ign:
2993 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2994 caps['unknown_immcap']))
2995 d2.addCallback(lambda ign:
2996 self.failUnlessROChildURIIs(n, u"dirchild-imm",
2998 d2.addCallback(lambda ign:
2999 self.failUnlessROChildURIIs(n, u"dirchild-lit",
3001 d2.addCallback(lambda ign:
3002 self.failUnlessROChildURIIs(n, u"dirchild-empty",
3003 caps['emptydircap']))
3005 d.addCallback(_after_mkdir)
3008 def test_POST_mkdir_no_parentdir_immutable_bad(self):
3009 (newkids, caps) = self._create_initial_children()
3010 d = self.shouldFail2(error.Error,
3011 "test_POST_mkdir_no_parentdir_immutable_bad",
3013 "needed to be immutable but was not",
3015 "/uri?t=mkdir-immutable",
3016 simplejson.dumps(newkids))
3019 def test_welcome_page_mkdir_button(self):
3020 # Fetch the welcome page.
3022 def _after_get_welcome_page(res):
3023 MKDIR_BUTTON_RE = re.compile(
3024 '<form action="([^"]*)" method="post".*?'
3025 '<input type="hidden" name="t" value="([^"]*)" />'
3026 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
3027 '<input type="submit" value="Create a directory" />',
3029 mo = MKDIR_BUTTON_RE.search(res)
3030 formaction = mo.group(1)
3032 formaname = mo.group(3)
3033 formavalue = mo.group(4)
3034 return (formaction, formt, formaname, formavalue)
3035 d.addCallback(_after_get_welcome_page)
3036 def _after_parse_form(res):
3037 (formaction, formt, formaname, formavalue) = res
3038 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
3039 d.addCallback(_after_parse_form)
3040 d.addBoth(self.shouldRedirect, None, statuscode='303')
3043 def test_POST_mkdir_replace(self): # return value?
3044 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
3045 d.addCallback(lambda res: self._foo_node.get(u"sub"))
3046 d.addCallback(self.failUnlessNodeKeysAre, [])
3049 def test_POST_mkdir_no_replace_queryarg(self): # return value?
3050 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
3051 d.addBoth(self.shouldFail, error.Error,
3052 "POST_mkdir_no_replace_queryarg",
3054 "There was already a child by that name, and you asked me "
3055 "to not replace it")
3056 d.addCallback(lambda res: self._foo_node.get(u"sub"))
3057 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
3060 def test_POST_mkdir_no_replace_field(self): # return value?
3061 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
3063 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
3065 "There was already a child by that name, and you asked me "
3066 "to not replace it")
3067 d.addCallback(lambda res: self._foo_node.get(u"sub"))
3068 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
3071 def test_POST_mkdir_whendone_field(self):
3072 d = self.POST(self.public_url + "/foo",
3073 t="mkdir", name="newdir", when_done="/THERE")
3074 d.addBoth(self.shouldRedirect, "/THERE")
3075 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
3076 d.addCallback(self.failUnlessNodeKeysAre, [])
3079 def test_POST_mkdir_whendone_queryarg(self):
3080 d = self.POST(self.public_url + "/foo?when_done=/THERE",
3081 t="mkdir", name="newdir")
3082 d.addBoth(self.shouldRedirect, "/THERE")
3083 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
3084 d.addCallback(self.failUnlessNodeKeysAre, [])
3087 def test_POST_bad_t(self):
3088 d = self.shouldFail2(error.Error, "POST_bad_t",
3090 "POST to a directory with bad t=BOGUS",
3091 self.POST, self.public_url + "/foo", t="BOGUS")
3094 def test_POST_set_children(self, command_name="set_children"):
3095 contents9, n9, newuri9 = self.makefile(9)
3096 contents10, n10, newuri10 = self.makefile(10)
3097 contents11, n11, newuri11 = self.makefile(11)
3100 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
3103 "ctime": 1002777696.7564139,
3104 "mtime": 1002777696.7564139
3107 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
3110 "ctime": 1002777696.7564139,
3111 "mtime": 1002777696.7564139
3114 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
3117 "ctime": 1002777696.7564139,
3118 "mtime": 1002777696.7564139
3121 }""" % (newuri9, newuri10, newuri11)
3123 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
3125 d = client.getPage(url, method="POST", postdata=reqbody)
3127 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
3128 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
3129 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
3131 d.addCallback(_then)
3132 d.addErrback(self.dump_error)
3135 def test_POST_set_children_with_hyphen(self):
3136 return self.test_POST_set_children(command_name="set-children")
3138 def test_POST_link_uri(self):
3139 contents, n, newuri = self.makefile(8)
3140 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
3141 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
3142 d.addCallback(lambda res:
3143 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
3147 def test_POST_link_uri_replace(self):
3148 contents, n, newuri = self.makefile(8)
3149 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
3150 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
3151 d.addCallback(lambda res:
3152 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
3156 def test_POST_link_uri_unknown_bad(self):
3157 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
3158 d.addBoth(self.shouldFail, error.Error,
3159 "POST_link_uri_unknown_bad",
3161 "unknown cap in a write slot")
3164 def test_POST_link_uri_unknown_ro_good(self):
3165 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
3166 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
3169 def test_POST_link_uri_unknown_imm_good(self):
3170 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
3171 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
3174 def test_POST_link_uri_no_replace_queryarg(self):
3175 contents, n, newuri = self.makefile(8)
3176 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
3177 name="bar.txt", uri=newuri)
3178 d.addBoth(self.shouldFail, error.Error,
3179 "POST_link_uri_no_replace_queryarg",
3181 "There was already a child by that name, and you asked me "
3182 "to not replace it")
3183 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
3184 d.addCallback(self.failUnlessIsBarDotTxt)
3187 def test_POST_link_uri_no_replace_field(self):
3188 contents, n, newuri = self.makefile(8)
3189 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
3190 name="bar.txt", uri=newuri)
3191 d.addBoth(self.shouldFail, error.Error,
3192 "POST_link_uri_no_replace_field",
3194 "There was already a child by that name, and you asked me "
3195 "to not replace it")
3196 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
3197 d.addCallback(self.failUnlessIsBarDotTxt)
3200 def test_POST_delete(self, command_name='delete'):
3201 d = self._foo_node.list()
3202 def _check_before(children):
3203 self.failUnless(u"bar.txt" in children)
3204 d.addCallback(_check_before)
3205 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t=command_name, name="bar.txt"))
3206 d.addCallback(lambda res: self._foo_node.list())
3207 def _check_after(children):
3208 self.failIf(u"bar.txt" in children)
3209 d.addCallback(_check_after)
3212 def test_POST_unlink(self):
3213 return self.test_POST_delete(command_name='unlink')
3215 def test_POST_rename_file(self):
3216 d = self.POST(self.public_url + "/foo", t="rename",
3217 from_name="bar.txt", to_name='wibble.txt')
3218 d.addCallback(lambda res:
3219 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
3220 d.addCallback(lambda res:
3221 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
3222 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
3223 d.addCallback(self.failUnlessIsBarDotTxt)
3224 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
3225 d.addCallback(self.failUnlessIsBarJSON)
3228 def test_POST_rename_file_redundant(self):
3229 d = self.POST(self.public_url + "/foo", t="rename",
3230 from_name="bar.txt", to_name='bar.txt')
3231 d.addCallback(lambda res:
3232 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
3233 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
3234 d.addCallback(self.failUnlessIsBarDotTxt)
3235 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
3236 d.addCallback(self.failUnlessIsBarJSON)
3239 def test_POST_rename_file_replace(self):
3240 # rename a file and replace a directory with it
3241 d = self.POST(self.public_url + "/foo", t="rename",
3242 from_name="bar.txt", to_name='empty')
3243 d.addCallback(lambda res:
3244 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
3245 d.addCallback(lambda res:
3246 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
3247 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
3248 d.addCallback(self.failUnlessIsBarDotTxt)
3249 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
3250 d.addCallback(self.failUnlessIsBarJSON)
3253 def test_POST_rename_file_no_replace_queryarg(self):
3254 # rename a file and replace a directory with it
3255 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
3256 from_name="bar.txt", to_name='empty')
3257 d.addBoth(self.shouldFail, error.Error,
3258 "POST_rename_file_no_replace_queryarg",
3260 "There was already a child by that name, and you asked me "
3261 "to not replace it")
3262 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
3263 d.addCallback(self.failUnlessIsEmptyJSON)
3266 def test_POST_rename_file_no_replace_field(self):
3267 # rename a file and replace a directory with it
3268 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
3269 from_name="bar.txt", to_name='empty')
3270 d.addBoth(self.shouldFail, error.Error,
3271 "POST_rename_file_no_replace_field",
3273 "There was already a child by that name, and you asked me "
3274 "to not replace it")
3275 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
3276 d.addCallback(self.failUnlessIsEmptyJSON)
3279 def failUnlessIsEmptyJSON(self, res):
3280 data = simplejson.loads(res)
3281 self.failUnlessEqual(data[0], "dirnode", data)
3282 self.failUnlessReallyEqual(len(data[1]["children"]), 0)
3284 def test_POST_rename_file_slash_fail(self):
3285 d = self.POST(self.public_url + "/foo", t="rename",
3286 from_name="bar.txt", to_name='kirk/spock.txt')
3287 d.addBoth(self.shouldFail, error.Error,
3288 "test_POST_rename_file_slash_fail",
3290 "to_name= may not contain a slash",
3292 d.addCallback(lambda res:
3293 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
3296 def test_POST_rename_dir(self):
3297 d = self.POST(self.public_url, t="rename",
3298 from_name="foo", to_name='plunk')
3299 d.addCallback(lambda res:
3300 self.failIfNodeHasChild(self.public_root, u"foo"))
3301 d.addCallback(lambda res:
3302 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
3303 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
3304 d.addCallback(self.failUnlessIsFooJSON)
3307 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
3308 """ If target is not None then the redirection has to go to target. If
3309 statuscode is not None then the redirection has to be accomplished with
3310 that HTTP status code."""
3311 if not isinstance(res, failure.Failure):
3312 to_where = (target is None) and "somewhere" or ("to " + target)
3313 self.fail("%s: we were expecting to get redirected %s, not get an"
3314 " actual page: %s" % (which, to_where, res))
3315 res.trap(error.PageRedirect)
3316 if statuscode is not None:
3317 self.failUnlessReallyEqual(res.value.status, statuscode,
3318 "%s: not a redirect" % which)
3319 if target is not None:
3320 # the PageRedirect does not seem to capture the uri= query arg
3321 # properly, so we can't check for it.
3322 realtarget = self.webish_url + target
3323 self.failUnlessReallyEqual(res.value.location, realtarget,
3324 "%s: wrong target" % which)
3325 return res.value.location
3327 def test_GET_URI_form(self):
3328 base = "/uri?uri=%s" % self._bar_txt_uri
3329 # this is supposed to give us a redirect to /uri/$URI, plus arguments
3330 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
3332 d.addBoth(self.shouldRedirect, targetbase)
3333 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
3334 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
3335 d.addCallback(lambda res: self.GET(base+"&t=json"))
3336 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
3337 d.addCallback(self.log, "about to get file by uri")
3338 d.addCallback(lambda res: self.GET(base, followRedirect=True))
3339 d.addCallback(self.failUnlessIsBarDotTxt)
3340 d.addCallback(self.log, "got file by uri, about to get dir by uri")
3341 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
3342 followRedirect=True))
3343 d.addCallback(self.failUnlessIsFooJSON)
3344 d.addCallback(self.log, "got dir by uri")
3348 def test_GET_URI_form_bad(self):
3349 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
3350 "400 Bad Request", "GET /uri requires uri=",
3354 def test_GET_rename_form(self):
3355 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
3356 followRedirect=True)
3358 self.failUnless('name="when_done" value="."' in res, res)
3359 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
3360 d.addCallback(_check)
3363 def log(self, res, msg):
3364 #print "MSG: %s RES: %s" % (msg, res)
3368 def test_GET_URI_URL(self):
3369 base = "/uri/%s" % self._bar_txt_uri
3371 d.addCallback(self.failUnlessIsBarDotTxt)
3372 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
3373 d.addCallback(self.failUnlessIsBarDotTxt)
3374 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
3375 d.addCallback(self.failUnlessIsBarDotTxt)
3378 def test_GET_URI_URL_dir(self):
3379 base = "/uri/%s?t=json" % self._foo_uri
3381 d.addCallback(self.failUnlessIsFooJSON)
3384 def test_GET_URI_URL_missing(self):
3385 base = "/uri/%s" % self._bad_file_uri
3386 d = self.shouldHTTPError("test_GET_URI_URL_missing",
3387 http.GONE, None, "NotEnoughSharesError",
3389 # TODO: how can we exercise both sides of WebDownloadTarget.fail
3390 # here? we must arrange for a download to fail after target.open()
3391 # has been called, and then inspect the response to see that it is
3392 # shorter than we expected.
3395 def test_PUT_DIRURL_uri(self):
3396 d = self.s.create_dirnode()
3398 new_uri = dn.get_uri()
3399 # replace /foo with a new (empty) directory
3400 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
3401 d.addCallback(lambda res:
3402 self.failUnlessReallyEqual(res.strip(), new_uri))
3403 d.addCallback(lambda res:
3404 self.failUnlessRWChildURIIs(self.public_root,
3408 d.addCallback(_made_dir)
3411 def test_PUT_DIRURL_uri_noreplace(self):
3412 d = self.s.create_dirnode()
3414 new_uri = dn.get_uri()
3415 # replace /foo with a new (empty) directory, but ask that
3416 # replace=false, so it should fail
3417 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
3418 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
3420 self.public_url + "/foo?t=uri&replace=false",
3422 d.addCallback(lambda res:
3423 self.failUnlessRWChildURIIs(self.public_root,
3427 d.addCallback(_made_dir)
3430 def test_PUT_DIRURL_bad_t(self):
3431 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
3432 "400 Bad Request", "PUT to a directory",
3433 self.PUT, self.public_url + "/foo?t=BOGUS", "")
3434 d.addCallback(lambda res:
3435 self.failUnlessRWChildURIIs(self.public_root,
3440 def test_PUT_NEWFILEURL_uri(self):
3441 contents, n, new_uri = self.makefile(8)
3442 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
3443 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
3444 d.addCallback(lambda res:
3445 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
3449 def test_PUT_NEWFILEURL_mdmf(self):
3450 new_contents = self.NEWFILE_CONTENTS * 300000
3451 d = self.PUT(self.public_url + \
3452 "/foo/mdmf.txt?format=mdmf",
3454 d.addCallback(lambda ignored:
3455 self.GET(self.public_url + "/foo/mdmf.txt?t=json"))
3456 def _got_json(json):
3457 data = simplejson.loads(json)
3459 self.failUnlessIn("format", data)
3460 self.failUnlessEqual(data["format"], "mdmf")
3461 self.failUnless(data['rw_uri'].startswith("URI:MDMF"))
3462 self.failUnless(data['ro_uri'].startswith("URI:MDMF"))
3463 d.addCallback(_got_json)
3466 def test_PUT_NEWFILEURL_sdmf(self):
3467 new_contents = self.NEWFILE_CONTENTS * 300000
3468 d = self.PUT(self.public_url + \
3469 "/foo/sdmf.txt?format=sdmf",
3471 d.addCallback(lambda ignored:
3472 self.GET(self.public_url + "/foo/sdmf.txt?t=json"))
3473 def _got_json(json):
3474 data = simplejson.loads(json)
3476 self.failUnlessIn("format", data)
3477 self.failUnlessEqual(data["format"], "sdmf")
3478 d.addCallback(_got_json)
3481 def test_PUT_NEWFILEURL_bad_format(self):
3482 new_contents = self.NEWFILE_CONTENTS * 300000
3483 return self.shouldHTTPError("PUT_NEWFILEURL_bad_format",
3484 400, "Bad Request", "Unknown format: foo",
3485 self.PUT, self.public_url + \
3486 "/foo/foo.txt?format=foo",
3489 def test_PUT_NEWFILEURL_uri_replace(self):
3490 contents, n, new_uri = self.makefile(8)
3491 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
3492 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
3493 d.addCallback(lambda res:
3494 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
3498 def test_PUT_NEWFILEURL_uri_no_replace(self):
3499 contents, n, new_uri = self.makefile(8)
3500 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
3501 d.addBoth(self.shouldFail, error.Error,
3502 "PUT_NEWFILEURL_uri_no_replace",
3504 "There was already a child by that name, and you asked me "
3505 "to not replace it")
3508 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
3509 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
3510 d.addBoth(self.shouldFail, error.Error,
3511 "POST_put_uri_unknown_bad",
3513 "unknown cap in a write slot")
3516 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
3517 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
3518 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
3519 u"put-future-ro.txt")
3522 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
3523 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
3524 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
3525 u"put-future-imm.txt")
3528 def test_PUT_NEWFILE_URI(self):
3529 file_contents = "New file contents here\n"
3530 d = self.PUT("/uri", file_contents)
3532 assert isinstance(uri, str), uri
3533 self.failUnless(uri in FakeCHKFileNode.all_contents)
3534 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
3536 return self.GET("/uri/%s" % uri)
3537 d.addCallback(_check)
3539 self.failUnlessReallyEqual(res, file_contents)
3540 d.addCallback(_check2)
3543 def test_PUT_NEWFILE_URI_not_mutable(self):
3544 file_contents = "New file contents here\n"
3545 d = self.PUT("/uri?mutable=false", file_contents)
3547 assert isinstance(uri, str), uri
3548 self.failUnless(uri in FakeCHKFileNode.all_contents)
3549 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
3551 return self.GET("/uri/%s" % uri)
3552 d.addCallback(_check)
3554 self.failUnlessReallyEqual(res, file_contents)
3555 d.addCallback(_check2)
3558 def test_PUT_NEWFILE_URI_only_PUT(self):
3559 d = self.PUT("/uri?t=bogus", "")
3560 d.addBoth(self.shouldFail, error.Error,
3561 "PUT_NEWFILE_URI_only_PUT",
3563 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# PUT /uri?mutable=true: response is a writeable SSK filecap; verify the
# cap's storage index landed in the fake mutable store, then download via
# the node API and via a webapi GET and compare both against the upload.
# NOTE(review): the 'def _check2'/'def _check3' lines are missing from this
# excerpt (embedded line numbers skip 3578/3582).
3566 def test_PUT_NEWFILE_URI_mutable(self):
3567 file_contents = "New file contents here\n"
3568 d = self.PUT("/uri?mutable=true", file_contents)
3569 def _check1(filecap):
3570 filecap = filecap.strip()
3571 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
3572 self.filecap = filecap
3573 u = uri.WriteableSSKFileURI.init_from_string(filecap)
3574 self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
3575 n = self.s.create_node_from_uri(filecap)
3576 return n.download_best_version()
3577 d.addCallback(_check1)
3579 self.failUnlessReallyEqual(data, file_contents)
# filecap may contain characters needing escaping, hence urllib.quote
3580 return self.GET("/uri/%s" % urllib.quote(self.filecap))
3581 d.addCallback(_check2)
3583 self.failUnlessReallyEqual(res, file_contents)
3584 d.addCallback(_check3)
# PUT /uri?t=mkdir creates an unlinked directory: the returned dircap
# must resolve to a node with no children, and its t=json rendering must
# be an empty directory.
3587 def test_PUT_mkdir(self):
3588 d = self.PUT("/uri?t=mkdir", "")
3590 n = self.s.create_node_from_uri(uri.strip())
3591 d2 = self.failUnlessNodeKeysAre(n, [])
3592 d2.addCallback(lambda res:
3593 self.GET("/uri/%s?t=json" % uri))
3595 d.addCallback(_check)
3596 d.addCallback(self.failUnlessIsEmptyJSON)
# mkdir with format=mdmf must yield an MDMF directory writecap.
3599 def test_PUT_mkdir_mdmf(self):
3600 d = self.PUT("/uri?t=mkdir&format=mdmf", "")
3602 u = uri.from_string(res)
3603 # Check that this is an MDMF writecap
3604 self.failUnlessIsInstance(u, uri.MDMFDirectoryURI)
# mkdir with format=sdmf must yield a plain (SDMF) directory writecap.
3608 def test_PUT_mkdir_sdmf(self):
3609 d = self.PUT("/uri?t=mkdir&format=sdmf", "")
3611 u = uri.from_string(res)
3612 self.failUnlessIsInstance(u, uri.DirectoryURI)
# mkdir with an unrecognized format= value must be a 400 Bad Request.
3616 def test_PUT_mkdir_bad_format(self):
3617 return self.shouldHTTPError("PUT_mkdir_bad_format",
3618 400, "Bad Request", "Unknown format: foo",
3619 self.PUT, "/uri?t=mkdir&format=foo",
# POST t=check with a name= argument runs a file check on the named child
# of the directory; with fake filenodes the result body is not asserted.
3622 def test_POST_check(self):
3623 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
3625 # this returns a string form of the results, which are probably
3626 # None since we're using fake filenodes.
3627 # TODO: verify that the check actually happened, by changing
3628 # FakeCHKFileNode to count how many times .check() has been
3631 d.addCallback(_done)
# In-place update of a mutable file via PUT ?offset=N: replace a middle
# span, append at the end, then overwrite the beginning, downloading and
# checking the full contents after each step.
# NOTE(review): some lines are missing from this excerpt (e.g. the
# 'def _then(filecap):' line before original 3639).
3635 def test_PUT_update_at_offset(self):
3636 file_contents = "test file" * 100000 # about 900 KiB
3637 d = self.PUT("/uri?mutable=true", file_contents)
3639 self.filecap = filecap
3640 new_data = file_contents[:100]
3641 new = "replaced and so on"
3643 new_data += file_contents[len(new_data):]
3644 assert len(new_data) == len(file_contents)
3645 self.new_data = new_data
3646 d.addCallback(_then)
# replace bytes starting at offset 100 with "replaced and so on"
3647 d.addCallback(lambda ignored:
3648 self.PUT("/uri/%s?replace=True&offset=100" % self.filecap,
3649 "replaced and so on"))
3650 def _get_data(filecap):
3651 n = self.s.create_node_from_uri(filecap)
3652 return n.download_best_version()
3653 d.addCallback(_get_data)
3654 d.addCallback(lambda results:
3655 self.failUnlessEqual(results, self.new_data))
3656 # Now try appending things to the file
3657 d.addCallback(lambda ignored:
3658 self.PUT("/uri/%s?offset=%d" % (self.filecap, len(self.new_data)),
3660 d.addCallback(_get_data)
3661 d.addCallback(lambda results:
3662 self.failUnlessEqual(results, self.new_data + ("puppies" * 100)))
3663 # and try replacing the beginning of the file
3664 d.addCallback(lambda ignored:
3665 self.PUT("/uri/%s?offset=0" % self.filecap, "begin"))
3666 d.addCallback(_get_data)
3667 d.addCallback(lambda results:
3668 self.failUnlessEqual(results, "begin"+self.new_data[len("begin"):]+("puppies"*100)))
# A negative offset= on a mutable-file PUT must produce an HTTP error.
# NOTE(review): the shouldHTTPError status/message arguments (original
# 3680-3682) are missing from this excerpt.
3671 def test_PUT_update_at_invalid_offset(self):
3672 file_contents = "test file" * 100000 # about 900 KiB
3673 d = self.PUT("/uri?mutable=true", file_contents)
3675 self.filecap = filecap
3676 d.addCallback(_then)
3677 # Negative offsets should cause an error.
3678 d.addCallback(lambda ignored:
3679 self.shouldHTTPError("PUT_update_at_invalid_offset",
3683 "/uri/%s?offset=-1" % self.filecap,
# Using offset= against an *immutable* file must produce an HTTP error:
# immutable files cannot be modified in place.
# NOTE(review): the shouldHTTPError status/message arguments (original
# 3695-3697) are missing from this excerpt.
3687 def test_PUT_update_at_offset_immutable(self):
3688 file_contents = "Test file" * 100000
3689 d = self.PUT("/uri", file_contents)
3691 self.filecap = filecap
3692 d.addCallback(_then)
3693 d.addCallback(lambda ignored:
3694 self.shouldHTTPError("PUT_update_at_offset_immutable",
3698 "/uri/%s?offset=50" % self.filecap,
# An unsupported HTTP method on a file URL must yield 501 Not Implemented.
3703 def test_bad_method(self):
3704 url = self.webish_url + self.public_url + "/foo/bar.txt"
3705 d = self.shouldHTTPError("bad_method",
3706 501, "Not Implemented",
3707 "I don't know how to treat a BOGUS request.",
3708 client.getPage, url, method="BOGUS")
# DELETE on the bare /uri root (no cap) is not supported: 501.
3711 def test_short_url(self):
3712 url = self.webish_url + "/uri"
3713 d = self.shouldHTTPError("short_url", 501, "Not Implemented",
3714 "I don't know how to treat a DELETE request.",
3715 client.getPage, url, method="DELETE")
# Querying the status of an operation handle that was never created must
# return 404 with an "unknown/expired handle" message.
3718 def test_ophandle_bad(self):
3719 url = self.webish_url + "/operations/bogus?t=status"
3720 d = self.shouldHTTPError("ophandle_bad", 404, "404 Not Found",
3721 "unknown/expired handle 'bogus'",
3722 client.getPage, url)
# Start a long-running manifest operation under ophandle=128, then POST
# t=cancel: the monitor must report cancelled and the handle must be
# forgotten (later status queries 404).
# NOTE(review): inner 'def _check1/_check2' lines and the 'return d' inside
# _check1 are missing from this excerpt.
3725 def test_ophandle_cancel(self):
3726 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
3727 followRedirect=True)
3728 d.addCallback(lambda ignored:
3729 self.GET("/operations/128?t=status&output=JSON"))
3731 data = simplejson.loads(res)
3732 self.failUnless("finished" in data, res)
# grab the monitor object before cancelling so we can inspect it after
3733 monitor = self.ws.root.child_operations.handles["128"][0]
3734 d = self.POST("/operations/128?t=cancel&output=JSON")
3736 data = simplejson.loads(res)
3737 self.failUnless("finished" in data, res)
3738 # t=cancel causes the handle to be forgotten
3739 self.failUnless(monitor.is_cancelled())
3740 d.addCallback(_check2)
3742 d.addCallback(_check1)
3743 d.addCallback(lambda ignored:
3744 self.shouldHTTPError("ophandle_cancel",
3745 404, "404 Not Found",
3746 "unknown/expired handle '128'",
3748 "/operations/128?t=status&output=JSON"))
# retain-for= controls ophandle lifetime: re-querying with retain-for=0
# shortens it, so after advancing the (fake) clock the handle is gone.
3751 def test_ophandle_retainfor(self):
3752 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
3753 followRedirect=True)
3754 d.addCallback(lambda ignored:
3755 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
3757 data = simplejson.loads(res)
3758 self.failUnless("finished" in data, res)
3759 d.addCallback(_check1)
3760 # the retain-for=0 will cause the handle to be expired very soon
3761 d.addCallback(lambda ign:
3762 self.clock.advance(2.0))
3763 d.addCallback(lambda ignored:
3764 self.shouldHTTPError("ophandle_retainfor",
3765 404, "404 Not Found",
3766 "unknown/expired handle '129'",
3768 "/operations/129?t=status&output=JSON"))
# release-after-complete=true on a finished operation's status query
# expires the handle immediately: the next status query must 404.
3771 def test_ophandle_release_after_complete(self):
3772 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
3773 followRedirect=True)
3774 d.addCallback(self.wait_for_operation, "130")
3775 d.addCallback(lambda ignored:
3776 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
3777 # the release-after-complete=true will cause the handle to be expired
3778 d.addCallback(lambda ignored:
3779 self.shouldHTTPError("ophandle_release_after_complete",
3780 404, "404 Not Found",
3781 "unknown/expired handle '130'",
3783 "/operations/130?t=status&output=JSON"))
# An ophandle whose result is never collected must survive just under
# 4 days (96h - 1s) and be expired at/after the 4-day mark. The fake
# reactor clock (self.clock) is advanced instead of sleeping.
3786 def test_uncollected_ophandle_expiration(self):
3787 # uncollected ophandles should expire after 4 days
3788 def _make_uncollected_ophandle(ophandle):
3789 d = self.POST(self.public_url +
3790 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3791 followRedirect=False)
3792 # When we start the operation, the webapi server will want
3793 # to redirect us to the page for the ophandle, so we get
3794 # confirmation that the operation has started. If the
3795 # manifest operation has finished by the time we get there,
3796 # following that redirect (by setting followRedirect=True
3797 # above) has the side effect of collecting the ophandle that
3798 # we've just created, which means that we can't use the
3799 # ophandle to test the uncollected timeout anymore. So,
3800 # instead, catch the 302 here and don't follow it.
3801 d.addBoth(self.should302, "uncollected_ophandle_creation")
3803 # Create an ophandle, don't collect it, then advance the clock by
3804 # 4 days - 1 second and make sure that the ophandle is still there.
3805 d = _make_uncollected_ophandle(131)
3806 d.addCallback(lambda ign:
3807 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
3808 d.addCallback(lambda ign:
3809 self.GET("/operations/131?t=status&output=JSON"))
3811 data = simplejson.loads(res)
3812 self.failUnless("finished" in data, res)
3813 d.addCallback(_check1)
3814 # Create an ophandle, don't collect it, then try to collect it
3815 # after 4 days. It should be gone.
3816 d.addCallback(lambda ign:
3817 _make_uncollected_ophandle(132))
3818 d.addCallback(lambda ign:
3819 self.clock.advance(96*60*60))
3820 d.addCallback(lambda ign:
3821 self.shouldHTTPError("uncollected_ophandle_expired_after_100_hours",
3822 404, "404 Not Found",
3823 "unknown/expired handle '132'",
3825 "/operations/132?t=status&output=JSON"))
# Once a result HAS been collected (by following the redirect), the
# handle lives only 1 day: still present at 24h - 1s, gone at 24h.
3828 def test_collected_ophandle_expiration(self):
3829 # collected ophandles should expire after 1 day
3830 def _make_collected_ophandle(ophandle):
3831 d = self.POST(self.public_url +
3832 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3833 followRedirect=True)
3834 # By following the initial redirect, we collect the ophandle
3835 # we've just created.
3837 # Create a collected ophandle, then collect it after 23 hours
3838 # and 59 seconds to make sure that it is still there.
3839 d = _make_collected_ophandle(133)
3840 d.addCallback(lambda ign:
3841 self.clock.advance((24*60*60) - 1))
3842 d.addCallback(lambda ign:
3843 self.GET("/operations/133?t=status&output=JSON"))
3845 data = simplejson.loads(res)
3846 self.failUnless("finished" in data, res)
3847 d.addCallback(_check1)
3848 # Create another uncollected ophandle, then try to collect it
3849 # after 24 hours to make sure that it is gone.
3850 d.addCallback(lambda ign:
3851 _make_collected_ophandle(134))
3852 d.addCallback(lambda ign:
3853 self.clock.advance(24*60*60))
3854 d.addCallback(lambda ign:
3855 self.shouldHTTPError("collected_ophandle_expired_after_1_day",
3856 404, "404 Not Found",
3857 "unknown/expired handle '134'",
3859 "/operations/134?t=status&output=JSON"))
# POST /report_incident records an incident report and thanks the user.
3862 def test_incident(self):
3863 d = self.POST("/report_incident", details="eek")
3865 self.failUnless("Thank you for your report!" in res, res)
3866 d.addCallback(_done)
# Files dropped under the node's static directory are served verbatim
# under /static/. (The f.write/f.close lines are missing from this
# excerpt — original 3873-3874.)
3869 def test_static(self):
3870 webdir = os.path.join(self.staticdir, "subdir")
3871 fileutil.make_dirs(webdir)
3872 f = open(os.path.join(webdir, "hello.txt"), "wb")
3876 d = self.GET("/static/subdir/hello.txt")
3878 self.failUnlessReallyEqual(res, "hello")
3879 d.addCallback(_check)
# Unit tests for the small helper functions in allmydata.web.common and
# allmydata.web.status: argument parsing, human-readable abbreviation of
# times/rates/sizes, and pluralization.
3883 class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
# getxmlfile must locate and parse the packaged template.
3884 def test_load_file(self):
3885 # This will raise an exception unless a well-formed XML file is found under that name.
3886 common.getxmlfile('directory.xhtml').load()
# parse_replace_arg accepts true/false/only-files; anything else (e.g. a
# typo like "only_fles") raises AssertionError.
3888 def test_parse_replace_arg(self):
3889 self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
3890 self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
3891 self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
3893 self.shouldFail(AssertionError, "test_parse_replace_arg", "",
3894 common.parse_replace_arg, "only_fles")
# abbreviate_time picks s/ms/us units; None renders as "".
3896 def test_abbreviate_time(self):
3897 self.failUnlessReallyEqual(common.abbreviate_time(None), "")
3898 self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
3899 self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
3900 self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
3901 self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
# negative times fall through to the smallest unit (current behavior)
3902 self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
# compute_rate(bytes, seconds) returns None for missing/zero inputs and
# asserts on negative ones.
3904 def test_compute_rate(self):
3905 self.failUnlessReallyEqual(common.compute_rate(None, None), None)
3906 self.failUnlessReallyEqual(common.compute_rate(None, 1), None)
3907 self.failUnlessReallyEqual(common.compute_rate(250000, None), None)
3908 self.failUnlessReallyEqual(common.compute_rate(250000, 0), None)
3909 self.failUnlessReallyEqual(common.compute_rate(250000, 10), 25000.0)
3910 self.failUnlessReallyEqual(common.compute_rate(0, 10), 0.0)
3911 self.shouldFail(AssertionError, "test_compute_rate", "",
3912 common.compute_rate, -100, 10)
3913 self.shouldFail(AssertionError, "test_compute_rate", "",
3914 common.compute_rate, 100, -10)
3917 rate = common.compute_rate(10*1000*1000, 1)
3918 self.failUnlessReallyEqual(common.abbreviate_rate(rate), "10.00MBps")
# abbreviate_rate uses decimal (SI) units: MBps/kBps/Bps.
3920 def test_abbreviate_rate(self):
3921 self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
3922 self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
3923 self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
3924 self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
# abbreviate_size likewise uses decimal units: GB/MB/kB/B.
3926 def test_abbreviate_size(self):
3927 self.failUnlessReallyEqual(common.abbreviate_size(None), "")
3928 self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
3929 self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
3930 self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
3931 self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")
# status.plural works on both counts and sequences.
# NOTE(review): the 'def convert'/'def convert2' lines (original
# 3934/3939) are missing from this excerpt.
3933 def test_plural(self):
3935 return "%d second%s" % (s, status.plural(s))
3936 self.failUnlessReallyEqual(convert(0), "0 seconds")
3937 self.failUnlessReallyEqual(convert(1), "1 second")
3938 self.failUnlessReallyEqual(convert(2), "2 seconds")
3940 return "has share%s: %s" % (status.plural(s), ",".join(s))
3941 self.failUnlessReallyEqual(convert2([]), "has shares: ")
3942 self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
3943 self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
3946 class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
# Helper: POST the query string `args` (e.g. "t=check&output=json") to the
# previously stashed URL for file `which`. The leading `ign` parameter is
# unused so this can sit directly in a Deferred callback chain.
3948 def CHECK(self, ign, which, args, clientnum=0):
3949 fileurl = self.fileurls[which]
3950 url = fileurl + "?" + args
3951 return self.GET(url, method="POST", clientnum=clientnum)
# End-to-end t=check coverage on a real grid: upload a healthy file, a
# "sick" file (1 share deleted), a "dead" file (9 of 10 shares deleted),
# a mutable file that gets one share corrupted, plus a literal file and a
# literal (small) directory; then exercise the HTML and JSON checker
# renderings for each.
# NOTE(review): this excerpt has dropped lines (set_up_grid, DATA
# definition, corrupt_share call at original 3999, etc.).
3953 def test_filecheck(self):
3954 self.basedir = "web/Grid/filecheck"
3956 c0 = self.g.clients[0]
3959 d = c0.upload(upload.Data(DATA, convergence=""))
3960 def _stash_uri(ur, which):
3961 self.uris[which] = ur.uri
3962 d.addCallback(_stash_uri, "good")
3963 d.addCallback(lambda ign:
3964 c0.upload(upload.Data(DATA+"1", convergence="")))
3965 d.addCallback(_stash_uri, "sick")
3966 d.addCallback(lambda ign:
3967 c0.upload(upload.Data(DATA+"2", convergence="")))
3968 d.addCallback(_stash_uri, "dead")
3969 def _stash_mutable_uri(n, which):
3970 self.uris[which] = n.get_uri()
3971 assert isinstance(self.uris[which], str)
3972 d.addCallback(lambda ign:
3973 c0.create_mutable_file(publish.MutableData(DATA+"3")))
3974 d.addCallback(_stash_mutable_uri, "corrupt")
3975 d.addCallback(lambda ign:
3976 c0.upload(upload.Data("literal", convergence="")))
3977 d.addCallback(_stash_uri, "small")
3978 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3979 d.addCallback(_stash_mutable_uri, "smalldir")
3981 def _compute_fileurls(ignored):
3983 for which in self.uris:
3984 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3985 d.addCallback(_compute_fileurls)
# damage the shares: delete 1 of 10 ("sick"), 9 of 10 ("dead"), and
# prepare corruption of one share of the mutable file
3987 def _clobber_shares(ignored):
3988 good_shares = self.find_uri_shares(self.uris["good"])
3989 self.failUnlessReallyEqual(len(good_shares), 10)
3990 sick_shares = self.find_uri_shares(self.uris["sick"])
3991 os.unlink(sick_shares[0][2])
3992 dead_shares = self.find_uri_shares(self.uris["dead"])
3993 for i in range(1, 10):
3994 os.unlink(dead_shares[i][2])
3995 c_shares = self.find_uri_shares(self.uris["corrupt"])
3996 cso = CorruptShareOptions()
3997 cso.stdout = StringIO()
3998 cso.parseOptions([c_shares[0][2]])
4000 d.addCallback(_clobber_shares)
# --- healthy file: HTML, HTML with return_to link, JSON ---
4002 d.addCallback(self.CHECK, "good", "t=check")
4003 def _got_html_good(res):
4004 self.failUnless("Healthy" in res, res)
4005 self.failIf("Not Healthy" in res, res)
4006 d.addCallback(_got_html_good)
4007 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
4008 def _got_html_good_return_to(res):
4009 self.failUnless("Healthy" in res, res)
4010 self.failIf("Not Healthy" in res, res)
4011 self.failUnless('<a href="somewhere">Return to file'
4013 d.addCallback(_got_html_good_return_to)
4014 d.addCallback(self.CHECK, "good", "t=check&output=json")
4015 def _got_json_good(res):
4016 r = simplejson.loads(res)
4017 self.failUnlessEqual(r["summary"], "Healthy")
4018 self.failUnless(r["results"]["healthy"])
4019 self.failIf(r["results"]["needs-rebalancing"])
4020 self.failUnless(r["results"]["recoverable"])
4021 d.addCallback(_got_json_good)
# --- literal file: always healthy, empty storage-index in JSON ---
4023 d.addCallback(self.CHECK, "small", "t=check")
4024 def _got_html_small(res):
4025 self.failUnless("Literal files are always healthy" in res, res)
4026 self.failIf("Not Healthy" in res, res)
4027 d.addCallback(_got_html_small)
4028 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
4029 def _got_html_small_return_to(res):
4030 self.failUnless("Literal files are always healthy" in res, res)
4031 self.failIf("Not Healthy" in res, res)
4032 self.failUnless('<a href="somewhere">Return to file'
4034 d.addCallback(_got_html_small_return_to)
4035 d.addCallback(self.CHECK, "small", "t=check&output=json")
4036 def _got_json_small(res):
4037 r = simplejson.loads(res)
4038 self.failUnlessEqual(r["storage-index"], "")
4039 self.failUnless(r["results"]["healthy"])
4040 d.addCallback(_got_json_small)
# --- literal (empty immutable) directory: same treatment ---
4042 d.addCallback(self.CHECK, "smalldir", "t=check")
4043 def _got_html_smalldir(res):
4044 self.failUnless("Literal files are always healthy" in res, res)
4045 self.failIf("Not Healthy" in res, res)
4046 d.addCallback(_got_html_smalldir)
4047 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
4048 def _got_json_smalldir(res):
4049 r = simplejson.loads(res)
4050 self.failUnlessEqual(r["storage-index"], "")
4051 self.failUnless(r["results"]["healthy"])
4052 d.addCallback(_got_json_smalldir)
# --- sick file: unhealthy but recoverable (9 of 10 shares remain) ---
4054 d.addCallback(self.CHECK, "sick", "t=check")
4055 def _got_html_sick(res):
4056 self.failUnless("Not Healthy" in res, res)
4057 d.addCallback(_got_html_sick)
4058 d.addCallback(self.CHECK, "sick", "t=check&output=json")
4059 def _got_json_sick(res):
4060 r = simplejson.loads(res)
4061 self.failUnlessEqual(r["summary"],
4062 "Not Healthy: 9 shares (enc 3-of-10)")
4063 self.failIf(r["results"]["healthy"])
4064 self.failIf(r["results"]["needs-rebalancing"])
4065 self.failUnless(r["results"]["recoverable"])
4066 d.addCallback(_got_json_sick)
# --- dead file: only 1 share left (< k=3), unrecoverable ---
4068 d.addCallback(self.CHECK, "dead", "t=check")
4069 def _got_html_dead(res):
4070 self.failUnless("Not Healthy" in res, res)
4071 d.addCallback(_got_html_dead)
4072 d.addCallback(self.CHECK, "dead", "t=check&output=json")
4073 def _got_json_dead(res):
4074 r = simplejson.loads(res)
4075 self.failUnlessEqual(r["summary"],
4076 "Not Healthy: 1 shares (enc 3-of-10)")
4077 self.failIf(r["results"]["healthy"])
4078 self.failIf(r["results"]["needs-rebalancing"])
4079 self.failIf(r["results"]["recoverable"])
4080 d.addCallback(_got_json_dead)
# --- corrupt mutable file: verify=true must detect the corrupt share ---
4082 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
4083 def _got_html_corrupt(res):
4084 self.failUnless("Not Healthy! : Unhealthy" in res, res)
4085 d.addCallback(_got_html_corrupt)
4086 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
4087 def _got_json_corrupt(res):
4088 r = simplejson.loads(res)
4089 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
4091 self.failIf(r["results"]["healthy"])
4092 self.failUnless(r["results"]["recoverable"])
4093 self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
4094 self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
4095 d.addCallback(_got_json_corrupt)
4097 d.addErrback(self.explain_web_error)
# t=check&repair=true HTML rendering: a healthy file needs no repair, a
# sick file (one deleted share) is repaired successfully, and a corrupt
# mutable share is detected by verify=true and repaired. Repair of an
# unrecoverable ("dead") file is known-unclear and left commented out.
# NOTE(review): this excerpt has dropped lines (set_up_grid, DATA,
# the corrupt_share call at original 4141, etc.).
4100 def test_repair_html(self):
4101 self.basedir = "web/Grid/repair_html"
4103 c0 = self.g.clients[0]
4106 d = c0.upload(upload.Data(DATA, convergence=""))
4107 def _stash_uri(ur, which):
4108 self.uris[which] = ur.uri
4109 d.addCallback(_stash_uri, "good")
4110 d.addCallback(lambda ign:
4111 c0.upload(upload.Data(DATA+"1", convergence="")))
4112 d.addCallback(_stash_uri, "sick")
4113 d.addCallback(lambda ign:
4114 c0.upload(upload.Data(DATA+"2", convergence="")))
4115 d.addCallback(_stash_uri, "dead")
4116 def _stash_mutable_uri(n, which):
4117 self.uris[which] = n.get_uri()
4118 assert isinstance(self.uris[which], str)
4119 d.addCallback(lambda ign:
4120 c0.create_mutable_file(publish.MutableData(DATA+"3")))
4121 d.addCallback(_stash_mutable_uri, "corrupt")
4123 def _compute_fileurls(ignored):
4125 for which in self.uris:
4126 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4127 d.addCallback(_compute_fileurls)
# same share-damage pattern as test_filecheck
4129 def _clobber_shares(ignored):
4130 good_shares = self.find_uri_shares(self.uris["good"])
4131 self.failUnlessReallyEqual(len(good_shares), 10)
4132 sick_shares = self.find_uri_shares(self.uris["sick"])
4133 os.unlink(sick_shares[0][2])
4134 dead_shares = self.find_uri_shares(self.uris["dead"])
4135 for i in range(1, 10):
4136 os.unlink(dead_shares[i][2])
4137 c_shares = self.find_uri_shares(self.uris["corrupt"])
4138 cso = CorruptShareOptions()
4139 cso.stdout = StringIO()
4140 cso.parseOptions([c_shares[0][2]])
4142 d.addCallback(_clobber_shares)
4144 d.addCallback(self.CHECK, "good", "t=check&repair=true")
4145 def _got_html_good(res):
4146 self.failUnless("Healthy" in res, res)
4147 self.failIf("Not Healthy" in res, res)
4148 self.failUnless("No repair necessary" in res, res)
4149 d.addCallback(_got_html_good)
4151 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
4152 def _got_html_sick(res):
4153 self.failUnless("Healthy : healthy" in res, res)
4154 self.failIf("Not Healthy" in res, res)
4155 self.failUnless("Repair successful" in res, res)
4156 d.addCallback(_got_html_sick)
4158 # repair of a dead file will fail, of course, but it isn't yet
4159 # clear how this should be reported. Right now it shows up as
4162 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
4163 #def _got_html_dead(res):
4165 # self.failUnless("Healthy : healthy" in res, res)
4166 # self.failIf("Not Healthy" in res, res)
4167 # self.failUnless("No repair necessary" in res, res)
4168 #d.addCallback(_got_html_dead)
4170 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
4171 def _got_html_corrupt(res):
4172 self.failUnless("Healthy : Healthy" in res, res)
4173 self.failIf("Not Healthy" in res, res)
4174 self.failUnless("Repair successful" in res, res)
4175 d.addCallback(_got_html_corrupt)
4177 d.addErrback(self.explain_web_error)
# JSON rendering of t=check&repair=true on a sick file: the response must
# report repair-attempted/repair-successful plus pre- and post-repair
# checker results.
4180 def test_repair_json(self):
4181 self.basedir = "web/Grid/repair_json"
4183 c0 = self.g.clients[0]
4186 d = c0.upload(upload.Data(DATA+"1", convergence=""))
4187 def _stash_uri(ur, which):
4188 self.uris[which] = ur.uri
4189 d.addCallback(_stash_uri, "sick")
4191 def _compute_fileurls(ignored):
4193 for which in self.uris:
4194 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4195 d.addCallback(_compute_fileurls)
# delete one share so the file is unhealthy-but-recoverable
4197 def _clobber_shares(ignored):
4198 sick_shares = self.find_uri_shares(self.uris["sick"])
4199 os.unlink(sick_shares[0][2])
4200 d.addCallback(_clobber_shares)
4202 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
4203 def _got_json_sick(res):
4204 r = simplejson.loads(res)
4205 self.failUnlessReallyEqual(r["repair-attempted"], True)
4206 self.failUnlessReallyEqual(r["repair-successful"], True)
4207 self.failUnlessEqual(r["pre-repair-results"]["summary"],
4208 "Not Healthy: 9 shares (enc 3-of-10)")
4209 self.failIf(r["pre-repair-results"]["results"]["healthy"])
4210 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
4211 self.failUnless(r["post-repair-results"]["results"]["healthy"])
4212 d.addCallback(_got_json_sick)
4214 d.addErrback(self.explain_web_error)
# A directory containing an UnknownNode (a cap from "the future") must
# still render: HTML listings show a '?' type, t=json exposes ro_uri (and
# rw_uri only in writeable views), and t=info pages omit operations that
# need a known node type. With immutable=True the child is a
# deep-immutable unknown cap inside an immutable dirnode.
# NOTE(review): this excerpt drops lines (set_up_grid, the if/else lines
# around originals 4219/4231/4236, several if expect_* lines inside the
# checkers), so branch structure must be confirmed against the full file.
4217 def test_unknown(self, immutable=False):
4218 self.basedir = "web/Grid/unknown"
4220 self.basedir = "web/Grid/unknown-immutable"
4223 c0 = self.g.clients[0]
4227 # the future cap format may contain slashes, which must be tolerated
4228 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
4232 name = u"future-imm"
4233 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
4234 d = c0.create_immutable_dirnode({name: (future_node, {})})
4237 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
4238 d = c0.create_dirnode()
4240 def _stash_root_and_create_file(n):
4242 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
4243 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
4245 return self.rootnode.set_node(name, future_node)
4246 d.addCallback(_stash_root_and_create_file)
4248 # make sure directory listing tolerates unknown nodes
4249 d.addCallback(lambda ign: self.GET(self.rooturl))
4250 def _check_directory_html(res, expected_type_suffix):
4251 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
4252 '<td>%s</td>' % (expected_type_suffix, str(name)),
4254 self.failUnless(re.search(pattern, res), res)
4255 # find the More Info link for name, should be relative
4256 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
4257 info_url = mo.group(1)
4258 self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
4260 d.addCallback(_check_directory_html, "-IMM")
4262 d.addCallback(_check_directory_html, "")
4264 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
4265 def _check_directory_json(res, expect_rw_uri):
4266 data = simplejson.loads(res)
4267 self.failUnlessEqual(data[0], "dirnode")
4268 f = data[1]["children"][name]
4269 self.failUnlessEqual(f[0], "unknown")
4271 self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
4273 self.failIfIn("rw_uri", f[1])
4275 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
4277 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
4278 self.failUnless("metadata" in f[1])
4279 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
4281 def _check_info(res, expect_rw_uri, expect_ro_uri):
4282 self.failUnlessIn("Object Type: <span>unknown</span>", res)
4284 self.failUnlessIn(unknown_rwcap, res)
4287 self.failUnlessIn(unknown_immcap, res)
4289 self.failUnlessIn(unknown_rocap, res)
4291 self.failIfIn(unknown_rocap, res)
# the info page for an unknown node must not offer operations that
# require knowing the node type
4292 self.failIfIn("Raw data as", res)
4293 self.failIfIn("Directory writecap", res)
4294 self.failIfIn("Checker Operations", res)
4295 self.failIfIn("Mutable File Operations", res)
4296 self.failIfIn("Directory Operations", res)
4298 # FIXME: these should have expect_rw_uri=not immutable; I don't know
4299 # why they fail. Possibly related to ticket #922.
4301 d.addCallback(lambda ign: self.GET(expected_info_url))
4302 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
4303 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
4304 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
4306 def _check_json(res, expect_rw_uri):
4307 data = simplejson.loads(res)
4308 self.failUnlessEqual(data[0], "unknown")
4310 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
4312 self.failIfIn("rw_uri", data[1])
4315 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
4316 self.failUnlessReallyEqual(data[1]["mutable"], False)
4318 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
4319 self.failUnlessReallyEqual(data[1]["mutable"], True)
4321 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
4322 self.failIf("mutable" in data[1], data[1])
4324 # TODO: check metadata contents
4325 self.failUnless("metadata" in data[1])
4327 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
4328 d.addCallback(_check_json, expect_rw_uri=not immutable)
4330 # and make sure that a read-only version of the directory can be
4331 # rendered too. This version will not have unknown_rwcap, whether
4332 # or not future_node was immutable.
4333 d.addCallback(lambda ign: self.GET(self.rourl))
4335 d.addCallback(_check_directory_html, "-IMM")
4337 d.addCallback(_check_directory_html, "-RO")
4339 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
4340 d.addCallback(_check_directory_json, expect_rw_uri=False)
4342 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
4343 d.addCallback(_check_json, expect_rw_uri=False)
4345 # TODO: check that getting t=info from the Info link in the ro directory
4346 # works, and does not include the writecap URI.
# Run the whole test_unknown scenario with the deep-immutable variant.
4349 def test_immutable_unknown(self):
4350 return self.test_unknown(immutable=True)
# "Mutant" children — entries of an *immutable* directory whose ro_uri
# field holds a mutable cap (SSK or SSK-RO) — must be silently dropped by
# the directory-listing code, while remaining physically present in the
# raw netstring serialization. Only the valid LIT child ("lonely") should
# survive in node.list(), the HTML listing, and the t=json rendering.
# NOTE(review): this excerpt drops lines (set_up_grid, nm assignment
# before original 4379, 'def _created(dn):', loop-counter lines in
# _check_data, etc.).
4352 def test_mutant_dirnodes_are_omitted(self):
4353 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
4356 c = self.g.clients[0]
4361 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
4362 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
4363 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
4365 # This method tests mainly dirnode, but we'd have to duplicate code in order to
4366 # test the dirnode and web layers separately.
4368 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
4369 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
4370 # When the directory is read, the mutants should be silently disposed of, leaving
4371 # their lonely sibling.
4372 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
4373 # because immutable directories don't have a writecap and therefore that field
4374 # isn't (and can't be) decrypted.
4375 # TODO: The field still exists in the netstring. Technically we should check what
4376 # happens if something is put there (_unpack_contents should raise ValueError),
4377 # but that can wait.
4379 lonely_child = nm.create_from_cap(lonely_uri)
4380 mutant_ro_child = nm.create_from_cap(mut_read_uri)
4381 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
# monkey-patch the mutant nodes so create_immutable_dirnode will accept
# them; this simulates a directory written by buggy/hostile software
4383 def _by_hook_or_by_crook():
4385 for n in [mutant_ro_child, mutant_write_in_ro_child]:
4386 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
4388 mutant_write_in_ro_child.get_write_uri = lambda: None
4389 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
4391 kids = {u"lonely": (lonely_child, {}),
4392 u"ro": (mutant_ro_child, {}),
4393 u"write-in-ro": (mutant_write_in_ro_child, {}),
4395 d = c.create_immutable_dirnode(kids)
4398 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
4399 self.failIf(dn.is_mutable())
4400 self.failUnless(dn.is_readonly())
4401 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
4402 self.failIf(hasattr(dn._node, 'get_writekey'))
4404 self.failUnless("RO-IMM" in rep)
4406 self.failUnlessIn("CHK", cap.to_string())
4409 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
4410 return download_to_data(dn._node)
4411 d.addCallback(_created)
# inspect the raw serialized directory: all three kids (including the
# mutants) must still be physically present
4413 def _check_data(data):
4414 # Decode the netstring representation of the directory to check that all children
4415 # are present. This is a bit of an abstraction violation, but there's not really
4416 # any other way to do it given that the real DirectoryNode._unpack_contents would
4417 # strip the mutant children out (which is what we're trying to test, later).
4420 while position < len(data):
4421 entries, position = split_netstring(data, 1, position)
4423 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
4424 name = name_utf8.decode("utf-8")
4425 self.failUnless(rwcapdata == "")
4426 self.failUnless(name in kids)
4427 (expected_child, ign) = kids[name]
4428 self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
4431 self.failUnlessReallyEqual(numkids, 3)
4432 return self.rootnode.list()
4433 d.addCallback(_check_data)
4435 # Now when we use the real directory listing code, the mutants should be absent.
4436 def _check_kids(children):
4437 self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
4438 lonely_node, lonely_metadata = children[u"lonely"]
4440 self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
4441 self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
4442 d.addCallback(_check_kids)
4444 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
4445 d.addCallback(lambda n: n.list())
4446 d.addCallback(_check_kids) # again with dirnode recreated from cap
4448 # Make sure the lonely child can be listed in HTML...
4449 d.addCallback(lambda ign: self.GET(self.rooturl))
4450 def _check_html(res):
4451 self.failIfIn("URI:SSK", res)
4452 get_lonely = "".join([r'<td>FILE</td>',
4454 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
4456 r'\s+<td align="right">%d</td>' % len("one"),
4458 self.failUnless(re.search(get_lonely, res), res)
4460 # find the More Info link for name, should be relative
4461 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
4462 info_url = mo.group(1)
4463 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
4464 d.addCallback(_check_html)
# ...and in JSON, with no rw_uri leaked
4467 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
4468 def _check_json(res):
4469 data = simplejson.loads(res)
4470 self.failUnlessEqual(data[0], "dirnode")
4471 listed_children = data[1]["children"]
4472 self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
4473 ll_type, ll_data = listed_children[u"lonely"]
4474 self.failUnlessEqual(ll_type, "filenode")
4475 self.failIf("rw_uri" in ll_data)
4476 self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
4477 d.addCallback(_check_json)
def test_deep_check(self):
    """Exercise t=stream-deep-check and t=stream-manifest over a tree.

    The tree holds a healthy CHK file ("good"), a literal file
    ("small"), a damaged-but-recoverable file ("sick", two shares
    deleted), and an UnknownNode ("future") which the walkers must
    skip.  Afterwards a subdirectory is made unrecoverable and both
    streaming APIs must emit an "ERROR:" line instead of JSON.

    NOTE(review): this excerpt elides a number of source lines (grid
    setup, several closing parentheses, the _done/u0/stats/error_lines
    bindings); treat the block as incomplete and confirm against the
    full file before editing.
    """
    self.basedir = "web/Grid/deep_check"
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        # root URL is used later by self.CHECK(..., "root", ...)
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        # remember each child's cap so its shares can be manipulated
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")

    # this tests that deep-check and stream-manifest will ignore
    # UnknownNode instances. Hopefully this will also cover deep-stats.
    future_node = UnknownNode(unknown_rwcap, unknown_rocap)
    d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))

    def _clobber_shares(ignored):
        # delete two shares of "sick": damaged but still recoverable
        self.delete_shares_numbered(self.uris["sick"], [0,1])
    d.addCallback(_clobber_shares)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    # NOTE(review): the enclosing "def _done(res):" and its try/except
    # wrapper, plus the u0/stats/s bindings, are elided from this
    # listing; the following lines belong inside _done.
    units = [simplejson.loads(line)
             for line in res.splitlines()
    print "response is:", res
    print "undecodeable line was '%s'" % line
    self.failUnlessReallyEqual(len(units), 5+1)
    # should be parent-first
    self.failUnlessEqual(u0["path"], [])
    self.failUnlessEqual(u0["type"], "directory")
    self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
    u0cr = u0["check-results"]
    self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
    ugood = [u for u in units
             if u["type"] == "file" and u["path"] == [u"good"]][0]
    self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
    ugoodcr = ugood["check-results"]
    self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
    self.failUnlessEqual(stats["type"], "stats")
    self.failUnlessReallyEqual(s["count-immutable-files"], 2)
    self.failUnlessReallyEqual(s["count-literal-files"], 1)
    self.failUnlessReallyEqual(s["count-directories"], 1)
    self.failUnlessReallyEqual(s["count-unknown"], 1)
    d.addCallback(_done)

    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_manifest(res):
        # response is newline-terminated JSON lines; last unit is stats
        self.failUnless(res.endswith("\n"))
        units = [simplejson.loads(t) for t in res[:-1].split("\n")]
        self.failUnlessReallyEqual(len(units), 5+1)
        self.failUnlessEqual(units[-1]["type"], "stats")
        self.failUnlessEqual(first["path"], [])
        self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
        self.failUnlessEqual(first["type"], "directory")
        stats = units[-1]["stats"]
        self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
        self.failUnlessReallyEqual(stats["count-literal-files"], 1)
        self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
        self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
        self.failUnlessReallyEqual(stats["count-unknown"], 1)
    d.addCallback(_check_manifest)

    # now add root/subdir and root/subdir/grandchild, then make subdir
    # unrecoverable, then see what happens

    d.addCallback(lambda ign:
                  self.rootnode.create_subdirectory(u"subdir"))
    d.addCallback(_stash_uri, "subdir")
    d.addCallback(lambda subdir_node:
                  subdir_node.add_file(u"grandchild",
                                       upload.Data(DATA+"2",
    d.addCallback(_stash_uri, "grandchild")

    d.addCallback(lambda ign:
                  self.delete_shares_numbered(self.uris["subdir"],

    # root/subdir [unrecoverable]
    # root/subdir/grandchild

    # how should a streaming-JSON API indicate fatal error?
    # answer: emit ERROR: instead of a JSON string

    d.addCallback(self.CHECK, "root", "t=stream-manifest")
    def _check_broken_manifest(res):
        lines = res.splitlines()
        # NOTE(review): the "error_lines = [i" opener of this list
        # comprehension and the "if not error_lines:" guard are elided.
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessReallyEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
    d.addCallback(_check_broken_manifest)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check")
    def _check_broken_deepcheck(res):
        lines = res.splitlines()
        # NOTE(review): same elided comprehension opener/guard as above.
        for (i,line) in enumerate(lines)
        if line.startswith("ERROR:")]
        self.fail("no ERROR: in output: %s" % (res,))
        first_error = error_lines[0]
        error_line = lines[first_error]
        error_msg = lines[first_error+1:]
        error_msg_s = "\n".join(error_msg) + "\n"
        self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
        self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
        units = [simplejson.loads(line) for line in lines[:first_error]]
        self.failUnlessReallyEqual(len(units), 6) # includes subdir
        last_unit = units[-1]
        self.failUnlessEqual(last_unit["path"], ["subdir"])
        r = last_unit["check-results"]["results"]
        self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
        self.failUnlessReallyEqual(r["count-shares-good"], 1)
        self.failUnlessReallyEqual(r["recoverable"], False)
    d.addCallback(_check_broken_deepcheck)

    d.addErrback(self.explain_web_error)
def test_deep_check_and_repair(self):
    """t=stream-deep-check&repair=true: only the "sick" file (one share
    deleted) is repaired; healthy objects report repair-attempted=False.

    NOTE(review): several source lines are elided from this excerpt
    (grid setup, closing parentheses, the _done/u0/stats/s bindings);
    treat the block as incomplete.
    """
    self.basedir = "web/Grid/deep_check_and_repair"
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        # root URL is used later by self.CHECK(..., "root", ...)
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"good", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "good")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"sick",
                                         upload.Data(DATA+"1",
    d.addCallback(_stash_uri, "sick")
    #d.addCallback(lambda ign:
    #              self.rootnode.add_file(u"dead",
    #                                     upload.Data(DATA+"2",
    #d.addCallback(_stash_uri, "dead")

    #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
    #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
    #d.addCallback(_stash_uri, "corrupt")

    def _clobber_shares(ignored):
        # "good" keeps all 10 shares; "sick" loses exactly one
        good_shares = self.find_uri_shares(self.uris["good"])
        self.failUnlessReallyEqual(len(good_shares), 10)
        sick_shares = self.find_uri_shares(self.uris["sick"])
        os.unlink(sick_shares[0][2])
        #dead_shares = self.find_uri_shares(self.uris["dead"])
        #for i in range(1, 10):
        #    os.unlink(dead_shares[i][2])

        #c_shares = self.find_uri_shares(self.uris["corrupt"])
        #cso = CorruptShareOptions()
        #cso.stdout = StringIO()
        #cso.parseOptions([c_shares[0][2]])
    d.addCallback(_clobber_shares)

    # root/good CHK, 10 shares
    # root/sick CHK, 9 shares

    d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
    # NOTE(review): the enclosing "def _done(res):" line is elided; the
    # following assertions belong inside _done.
    units = [simplejson.loads(line)
             for line in res.splitlines()
    self.failUnlessReallyEqual(len(units), 4+1)
    # should be parent-first
    self.failUnlessEqual(u0["path"], [])
    self.failUnlessEqual(u0["type"], "directory")
    self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
    u0crr = u0["check-and-repair-results"]
    self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
    self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
    ugood = [u for u in units
             if u["type"] == "file" and u["path"] == [u"good"]][0]
    self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
    ugoodcrr = ugood["check-and-repair-results"]
    self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
    self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
    usick = [u for u in units
             if u["type"] == "file" and u["path"] == [u"sick"]][0]
    self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
    usickcrr = usick["check-and-repair-results"]
    self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
    self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
    self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
    self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
    self.failUnlessEqual(stats["type"], "stats")
    self.failUnlessReallyEqual(s["count-immutable-files"], 2)
    self.failUnlessReallyEqual(s["count-literal-files"], 1)
    self.failUnlessReallyEqual(s["count-directories"], 1)
    d.addCallback(_done)

    d.addErrback(self.explain_web_error)
def _count_leases(self, ignored, which):
    """Return [(sharefile_path, lease_count)] for each share of self.uris[which].

    The unused *ignored* parameter lets this method sit directly in a
    Deferred chain (``d.addCallback(self._count_leases, "one")``); the
    previous callback's result is discarded.  The returned pairs are
    consumed by _assert_leasecount().

    Fix: the visible text never initialized ``lease_counts`` (NameError
    on first append) and never returned it (callers would receive
    None); restore the accumulator initialization and the return.
    """
    u = self.uris[which]
    shares = self.find_uri_shares(u)
    lease_counts = []
    for shnum, serverid, fn in shares:
        sf = get_share_file(fn)
        # get_leases() yields lease records; we only need how many
        num_leases = len(list(sf.get_leases()))
        lease_counts.append( (fn, num_leases) )
    return lease_counts
def _assert_leasecount(self, lease_counts, expected):
    """Fail the test unless every (sharefile, count) pair from
    _count_leases() holds exactly *expected* leases; the failure
    message names the first offending share file."""
    mismatches = [(fn, num_leases) for (fn, num_leases) in lease_counts
                  if num_leases != expected]
    if mismatches:
        fn, num_leases = mismatches[0]
        self.fail("expected %d leases, have %d, on %s" %
                  (expected, num_leases, fn))
def test_add_lease(self):
    """t=check&add-lease=true adds a lease only for a new lease-secret.

    Plain t=check leaves counts untouched; add-lease from the original
    client merely renews the existing lease (count stays 1); add-lease
    from a second client (different secrets) adds a second lease.

    NOTE(review): a few source lines are elided from this excerpt
    (DATA / self.uris / self.fileurls initialization and one
    "clientnum=1))" continuation near the end).
    """
    self.basedir = "web/Grid/add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri(ur, which):
        self.uris[which] = ur.uri
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  c0.upload(upload.Data(DATA+"1", convergence="")))
    d.addCallback(_stash_uri, "two")
    def _stash_mutable_uri(n, which):
        self.uris[which] = n.get_uri()
        assert isinstance(self.uris[which], str)
    d.addCallback(lambda ign:
                  c0.create_mutable_file(publish.MutableData(DATA+"2")))
    d.addCallback(_stash_mutable_uri, "mutable")

    def _compute_fileurls(ignored):
        for which in self.uris:
            self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
    d.addCallback(_compute_fileurls)

    # all three objects start with one lease apiece
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "one", "t=check") # no add-lease
    def _got_html_good(res):
        self.failUnless("Healthy" in res, res)
        self.failIf("Not Healthy" in res, res)
    d.addCallback(_got_html_good)

    # plain t=check must not change any lease counts
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses the original client, which uses the same
    # lease-secrets, so it will just renew the original lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # this CHECK uses an alternate client, which adds a second lease
    d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # NOTE(review): the "clientnum=1))" continuation of this call is
    # elided from this excerpt.
    d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
    d.addCallback(_got_html_good)

    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "two")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
def test_deep_add_lease(self):
    """t=stream-deep-check&add-lease=true over a small tree.

    Same-client add-lease only renews (all counts stay 1); a second
    client's add-lease pushes every object to two leases.

    NOTE(review): some initialization lines, closing parentheses, and
    the "def _done(res):" opener are elided from this excerpt.
    """
    self.basedir = "web/Grid/deep_add_lease"
    self.set_up_grid(num_clients=2)
    c0 = self.g.clients[0]
    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.uris["root"] = n.get_uri()
        self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
        return n.add_file(u"one", upload.Data(DATA, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_uri(fn, which):
        self.uris[which] = fn.get_uri()
    d.addCallback(_stash_uri, "one")
    d.addCallback(lambda ign:
                  self.rootnode.add_file(u"small",
                                         upload.Data("literal",
    d.addCallback(_stash_uri, "small")

    d.addCallback(lambda ign:
                  c0.create_mutable_file(publish.MutableData("mutable")))
    d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
    d.addCallback(_stash_uri, "mutable")

    d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
    # NOTE(review): the "def _done(res):" opener is elided; the next
    # lines belong inside _done.
    units = [simplejson.loads(line)
             for line in res.splitlines()
    # root, one, small, mutable, stats
    self.failUnlessReallyEqual(len(units), 4+1)
    d.addCallback(_done)

    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
    d.addCallback(_done)

    # same client: leases renewed, not added
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 1)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 1)

    # NOTE(review): the "clientnum=1))" continuation is elided here.
    d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
    d.addCallback(_done)

    # second client's secrets differ, so a second lease appears
    d.addCallback(self._count_leases, "root")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "one")
    d.addCallback(self._assert_leasecount, 2)
    d.addCallback(self._count_leases, "mutable")
    d.addCallback(self._assert_leasecount, 2)

    d.addErrback(self.explain_web_error)
def test_exceptions(self):
    """Webapi error rendering: 410 Gone for unrecoverable objects, 404
    for missing children, and 500 with an html or text/plain traceback
    chosen by the request's Accept header (via an ErrorBoom resource).

    NOTE(review): several source lines are elided from this excerpt
    (DATA and dict initialization, a few "def _stash_*" openers,
    "u = n.get_uri()" lines, "self.GET," arguments, and the final
    return); treat the block as incomplete.
    """
    self.basedir = "web/Grid/exceptions"
    self.set_up_grid(num_clients=1, num_servers=2)
    c0 = self.g.clients[0]
    c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
    d = c0.create_dirnode()
    # NOTE(review): the "def _stash_root(n):" opener is elided here.
    self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
    self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
    d.addCallback(_stash_root)
    d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
    # NOTE(review): the "def _stash_bad(ur):" opener is elided here.
    self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
    self.delete_shares_numbered(ur.uri, range(1,10))

    # flipping a key bit yields a well-formed cap with zero shares
    u = uri.from_string(ur.uri)
    u.key = testutil.flip_bit(u.key, 0)
    baduri = u.to_string()
    self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
    d.addCallback(_stash_bad)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_1share(n):
        # NOTE(review): the "u = n.get_uri()" line is elided here.
        url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-1share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(1,10))
    d.addCallback(_mangle_dirnode_1share)
    d.addCallback(lambda ign: c0.create_dirnode())
    def _mangle_dirnode_0share(n):
        # NOTE(review): the "u = n.get_uri()" line is elided here.
        url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
        self.fileurls["dir-0share-json"] = url + "?t=json"
        self.delete_shares_numbered(u, range(0,10))
    d.addCallback(_mangle_dirnode_0share)

    # NotEnoughSharesError should be reported sensibly, with a
    # text/plain explanation of the problem, and perhaps some
    # information on which shares *could* be found.

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET unrecoverable",
                                       410, "Gone", "NoSharesError",
                                       self.GET, self.fileurls["0shares"]))
    def _check_zero_shares(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("NoSharesError: no shares could be found. "
               "Zero shares usually indicates a corrupt URI, or that "
               "no servers were connected, but it might also indicate "
               "severe corruption. You should perform a filecheck on "
               "this object to learn more. The full error message is: "
               "no shares (need 3). Last failure: None")
        self.failUnlessReallyEqual(exp, body)
    d.addCallback(_check_zero_shares)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET 1share",
                                       410, "Gone", "NotEnoughSharesError",
                                       self.GET, self.fileurls["1share"]))
    def _check_one_share(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        msgbase = ("NotEnoughSharesError: This indicates that some "
                   "servers were unavailable, or that shares have been "
                   "lost to server departure, hard drive failure, or disk "
                   "corruption. You should perform a filecheck on "
                   "this object to learn more. The full error message is:"
        # NOTE(review): parts of msg1/msg2 ("complete=..." etc.) are
        # elided from this excerpt.
        msg1 = msgbase + (" ran out of shares:"
                          " overdue= unused= need 3. Last failure: None")
        msg2 = msgbase + (" ran out of shares:"
                          " pending=Share(sh0-on-xgru5)"
                          " overdue= unused= need 3. Last failure: None")
        self.failUnless(body == msg1 or body == msg2, body)
    d.addCallback(_check_one_share)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))
    def _missing_child(body):
        self.failUnless("No such child: imaginary" in body, body)
    d.addCallback(_missing_child)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
    def _check_0shares_dir_html(body):
        self.failUnless("<html>" in body, body)
        # we should see the regular page, but without the child table or
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_0shares_dir_html)

    d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
    def _check_1shares_dir_html(body):
        # at some point, we'll split UnrecoverableFileError into 0-shares
        # and some-shares like we did for immutable files (since there
        # are different sorts of advice to offer in each case). For now,
        # they present the same way.
        self.failUnless("<html>" in body, body)
        body = " ".join(body.strip().split())
        self.failUnlessIn('href="?t=info">More info on this directory',
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessIn(exp, body)
        self.failUnlessIn("No upload forms: directory is unreadable", body)
    d.addCallback(_check_1shares_dir_html)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-0share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-0share-json"]))
    def _check_unrecoverable_file(body):
        self.failIf("<html>" in body, body)
        body = " ".join(body.strip().split())
        exp = ("UnrecoverableFileError: the directory (or mutable file) "
               "could not be retrieved, because there were insufficient "
               "good shares. This might indicate that no servers were "
               "connected, insufficient servers were connected, the URI "
               "was corrupt, or that shares have been lost due to server "
               "departure, hard drive failure, or disk corruption. You "
               "should perform a filecheck on this object to learn more.")
        self.failUnlessReallyEqual(exp, body)
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET dir-1share-json",
                                       410, "Gone", "UnrecoverableFileError",
                                       self.fileurls["dir-1share-json"]))
    d.addCallback(_check_unrecoverable_file)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET imaginary",
                                       404, "Not Found", None,
                                       self.GET, self.fileurls["imaginary"]))

    # attach a webapi child that throws a random error, to test how it
    w = c0.getServiceNamed("webish")
    w.root.putChild("ERRORBOOM", ErrorBoom())

    # "Accept: */*" : should get a text/html stack trace
    # "Accept: text/plain" : should get a text/plain stack trace
    # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
    # no Accept header: should get a text/html stack trace

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_html",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["*/*"]}))
    def _internal_error_html1(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html1)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": ["text/plain"]}))
    def _internal_error_text2(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text2)

    CLI_accepts = "text/plain, application/octet-stream"
    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM",
                                       headers={"accept": [CLI_accepts]}))
    def _internal_error_text3(body):
        self.failIf("<html>" in body, body)
        self.failUnless(body.startswith("Traceback "), body)
    d.addCallback(_internal_error_text3)

    d.addCallback(lambda ignored:
                  self.shouldHTTPError("GET errorboom_text",
                                       500, "Internal Server Error", None,
                                       self.GET, "ERRORBOOM"))
    def _internal_error_html4(body):
        self.failUnless("<html>" in body, "expected HTML, not '%s'" % body)
    d.addCallback(_internal_error_html4)

    def _flush_errors(res):
        # Trial: please ignore the CompletelyUnhandledError in the logs
        self.flushLoggedErrors(CompletelyUnhandledError)
    d.addBoth(_flush_errors)
def test_blacklist(self):
    """Blacklisted objects are refused by the webapi; parent listings
    still render (struck-through in HTML, intact in JSON); emptying the
    blacklist file restores access; blacklisting a directory blocks the
    directory and everything reached through it.

    NOTE(review): several source lines are elided from this excerpt
    (grid setup, "self.uri = ur.uri", the open() calls that create the
    blacklist file, the "def _add_dir(ign):" opener, the HTTP error
    codes in the shouldHTTPError calls, and the final return).
    """
    # download from a blacklisted URI, get an error
    self.basedir = "web/Grid/blacklist"
    c0 = self.g.clients[0]
    c0_basedir = c0.basedir
    fn = os.path.join(c0_basedir, "access.blacklist")
    DATA = "off-limits " * 50

    d = c0.upload(upload.Data(DATA, convergence=""))
    def _stash_uri_and_create_dir(ur):
        # NOTE(review): the "self.uri = ur.uri" line is elided here.
        self.url = "uri/"+self.uri
        u = uri.from_string_filenode(self.uri)
        self.si = u.get_storage_index()
        childnode = c0.create_node_from_uri(self.uri, None)
        return c0.create_dirnode({u"blacklisted.txt": (childnode,{}) })
    d.addCallback(_stash_uri_and_create_dir)
    def _stash_dir(node):
        self.dir_node = node
        self.dir_uri = node.get_uri()
        self.dir_url = "uri/"+self.dir_uri
    d.addCallback(_stash_dir)
    d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
    def _check_dir_html(body):
        self.failUnlessIn("<html>", body)
        self.failUnlessIn("blacklisted.txt</a>", body)
    d.addCallback(_check_dir_html)
    d.addCallback(lambda ign: self.GET(self.url))
    d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

    def _blacklist(ign):
        # NOTE(review): the open()/close() of the blacklist file are
        # elided here; comment and blank lines exercise the parser.
        f.write(" # this is a comment\n")
        f.write("\n") # also exercise blank lines
        f.write("%s %s\n" % (base32.b2a(self.si), "off-limits to you"))
    # clients should be checking the blacklist each time, so we don't
    # need to restart the client
    d.addCallback(_blacklist)
    d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_uri",
                                                   "Access Prohibited: off-limits",
                                                   self.GET, self.url))

    # We should still be able to list the parent directory, in HTML...
    d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
    def _check_dir_html2(body):
        self.failUnlessIn("<html>", body)
        # blacklisted child renders struck-through
        self.failUnlessIn("blacklisted.txt</strike>", body)
    d.addCallback(_check_dir_html2)

    # ... and in JSON (used by CLI).
    d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True))
    def _check_dir_json(res):
        data = simplejson.loads(res)
        self.failUnless(isinstance(data, list), data)
        self.failUnlessEqual(data[0], "dirnode")
        self.failUnless(isinstance(data[1], dict), data)
        self.failUnlessIn("children", data[1])
        self.failUnlessIn("blacklisted.txt", data[1]["children"])
        childdata = data[1]["children"]["blacklisted.txt"]
        self.failUnless(isinstance(childdata, list), data)
        self.failUnlessEqual(childdata[0], "filenode")
        self.failUnless(isinstance(childdata[1], dict), data)
    d.addCallback(_check_dir_json)

    def _unblacklist(ign):
        open(fn, "w").close()
        # the Blacklist object watches mtime to tell when the file has
        # changed, but on windows this test will run faster than the
        # filesystem's mtime resolution. So we edit Blacklist.last_mtime
        # to force a reload.
        self.g.clients[0].blacklist.last_mtime -= 2.0
    d.addCallback(_unblacklist)

    # now a read should work
    d.addCallback(lambda ign: self.GET(self.url))
    d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

    # read again to exercise the blacklist-is-unchanged logic
    d.addCallback(lambda ign: self.GET(self.url))
    d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

    # now add a blacklisted directory, and make sure files under it are
    # NOTE(review): the "def _add_dir(ign):" opener is elided; the next
    # two lines belong inside it.
    childnode = c0.create_node_from_uri(self.uri, None)
    return c0.create_dirnode({u"child": (childnode,{}) })
    d.addCallback(_add_dir)
    def _get_dircap(dn):
        self.dir_si_b32 = base32.b2a(dn.get_storage_index())
        self.dir_url_base = "uri/"+dn.get_write_uri()
        self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json"
        self.dir_url_json2 = "uri/"+dn.get_write_uri()+"/?t=json"
        self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"/?t=json"
        self.child_url = "uri/"+dn.get_readonly_uri()+"/child"
    d.addCallback(_get_dircap)
    d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
    d.addCallback(lambda body: self.failUnlessIn("<html>", body))
    d.addCallback(lambda ign: self.GET(self.dir_url_json1))
    d.addCallback(lambda res: simplejson.loads(res))  # just check it decodes
    d.addCallback(lambda ign: self.GET(self.dir_url_json2))
    d.addCallback(lambda res: simplejson.loads(res))  # just check it decodes
    d.addCallback(lambda ign: self.GET(self.dir_url_json_ro))
    d.addCallback(lambda res: simplejson.loads(res))  # just check it decodes
    d.addCallback(lambda ign: self.GET(self.child_url))
    d.addCallback(lambda body: self.failUnlessEqual(DATA, body))

    def _block_dir(ign):
        # NOTE(review): the open()/close() of the blacklist file are
        # elided here.
        f.write("%s %s\n" % (self.dir_si_b32, "dir-off-limits to you"))
        self.g.clients[0].blacklist.last_mtime -= 2.0
    d.addCallback(_block_dir)
    d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir base",
                                                   "Access Prohibited: dir-off-limits",
                                                   self.GET, self.dir_url_base))
    d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json1",
                                                   "Access Prohibited: dir-off-limits",
                                                   self.GET, self.dir_url_json1))
    d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json2",
                                                   "Access Prohibited: dir-off-limits",
                                                   self.GET, self.dir_url_json2))
    d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json_ro",
                                                   "Access Prohibited: dir-off-limits",
                                                   self.GET, self.dir_url_json_ro))
    d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir child",
                                                   "Access Prohibited: dir-off-limits",
                                                   self.GET, self.child_url))
class CompletelyUnhandledError(Exception):
    """Marker exception raised by ErrorBoom and flushed from the trial
    logs by test_exceptions, to exercise the webapi's 500-error
    rendering.

    Fix: the class body (presumably a bare ``pass``) is missing in the
    visible text, which leaves the ``class`` statement without a suite;
    a docstring is a behavior-compatible body.
    """
class ErrorBoom(rend.Page):
    """A web resource whose rendering always raises, used by
    test_exceptions to probe the webapi's 500-error paths."""
    def beforeRender(self, ctx):
        # blow up before any rendering happens; deliberately left
        # unhandled so the server's traceback rendering is exercised
        boom = CompletelyUnhandledError("whoops")
        raise boom