1 import os.path, re, urllib, time
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.internet.task import Clock
8 from twisted.web import client, error, http
9 from twisted.python import failure, log
10 from nevow import rend
11 from allmydata import interfaces, uri, webish, dirnode
12 from allmydata.storage.shares import get_share_file
13 from allmydata.storage_client import StorageFarmBroker
14 from allmydata.immutable import upload
15 from allmydata.immutable.downloader.status import DownloadStatus
16 from allmydata.dirnode import DirectoryNode
17 from allmydata.nodemaker import NodeMaker
18 from allmydata.unknown import UnknownNode
19 from allmydata.web import status, common
20 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
21 from allmydata.util import fileutil, base32, hashutil
22 from allmydata.util.consumer import download_to_data
23 from allmydata.util.netstring import split_netstring
24 from allmydata.util.encodingutil import to_str
25 from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
26 create_chk_filenode, WebErrorMixin, ShouldFailMixin, \
27 make_mutable_file_uri, create_mutable_filenode
28 from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
29 from allmydata.mutable import servermap, publish, retrieve
30 import allmydata.test.common_util as testutil
31 from allmydata.test.no_network import GridTestMixin
32 from allmydata.test.common_web import HTTPClientGETFactory, \
34 from allmydata.client import Client, SecretHolder
36 # create a fake uploader/downloader, and a couple of fake dirnodes, then
37 # create a webserver that works against them
39 timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
# Byte-string caps in a deliberately unrecognized "from the future" format
# (UTF-8 encoded, including a non-ASCII snowman character). Presumably used
# to exercise handling of unknown/unrecognized caps — verify against callers.
41 unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
42 unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
43 unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
# Minimal stand-in for the node's stats provider: serves an empty stats
# dictionary. NOTE(review): lines are elided in this excerpt; the method
# wrapping the dict below is not visible here.
45 class FakeStatsProvider:
47 stats = {'stats': {}, 'counters': {}}
# NodeMaker subclass that fabricates in-memory fake filenodes instead of
# real network-backed ones, so web tests run without storage servers.
50 class FakeNodeMaker(NodeMaker):
55 'max_segment_size':128*1024 # 1024=KiB
# LIT and CHK caps both map to the same fake immutable node type.
57 def _create_lit(self, cap):
58 return FakeCHKFileNode(cap)
59 def _create_immutable(self, cap):
60 return FakeCHKFileNode(cap)
61 def _create_mutable(self, cap):
62 return FakeMutableFileNode(None,
64 self.encoding_params, None).init_from_cap(cap)
# Returns a Deferred-like result from FakeMutableFileNode.create();
# default version is SDMF, callers may request MDMF via `version`.
65 def create_mutable_file(self, contents="", keysize=None,
66 version=SDMF_VERSION):
67 n = FakeMutableFileNode(None, None, self.encoding_params, None)
68 return n.create(contents, version=version)
# Uploader service stand-in: reads the whole uploadable into memory,
# wraps it in a fake CHK filenode, and reports its URI via UploadResults.
# NOTE(review): several lines are elided in this excerpt (the _got_data
# callback header, the `return d`, and get_helper_info's body).
70 class FakeUploader(service.Service):
72 def upload(self, uploadable):
73 d = uploadable.get_size()
74 d.addCallback(lambda size: uploadable.read(size))
77 n = create_chk_filenode(data)
78 results = upload.UploadResults()
79 results.uri = n.get_uri()
81 d.addCallback(_got_data)
83 def get_helper_info(self):
# Methods of a fake IServer (class header elided in this excerpt): stores a
# binary server id and returns fixed short/long display names.
87 def __init__(self, binaryserverid):
88 self.binaryserverid = binaryserverid
89 def get_name(self): return "short"
90 def get_longname(self): return "long"
91 def get_serverid(self): return self.binaryserverid
# Body of a DownloadStatus-building helper (its def line and the binding of
# `now` are elided in this excerpt). Populates a DownloadStatus with a mix of
# finished and deliberately-unfinished events so the status renderers have
# every state to display.
94 ds = DownloadStatus("storage_index", 1234)
97 serverA = FakeIServer(hashutil.tagged_hash("foo", "serverid_a")[:20])
98 serverB = FakeIServer(hashutil.tagged_hash("foo", "serverid_b")[:20])
99 storage_index = hashutil.storage_index_hash("SI")
100 e0 = ds.add_segment_request(0, now)
102 e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime
103 e1 = ds.add_segment_request(1, now+2)
105 # two outstanding requests
106 e2 = ds.add_segment_request(2, now+4)
107 e3 = ds.add_segment_request(3, now+5)
108 del e2,e3 # hush pyflakes
110 # simulate a segment which gets delivered faster than a system clock tick (ticket #1166)
111 e = ds.add_segment_request(4, now)
113 e.deliver(now, 0, 140, 0.5)
115 e = ds.add_dyhb_request(serverA, now)
116 e.finished([1,2], now+1)
117 e = ds.add_dyhb_request(serverB, now+2) # left unfinished
119 e = ds.add_read_event(0, 120, now)
120 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
122 e = ds.add_read_event(120, 30, now+2) # left unfinished
124 e = ds.add_block_request(serverA, 1, 100, 20, now)
125 e.finished(20, now+1)
126 e = ds.add_block_request(serverB, 1, 120, 30, now+1) # left unfinished
128 # make sure that add_read_event() can come first too
129 ds1 = DownloadStatus(storage_index, 1234)
130 e = ds1.add_read_event(0, 120, now)
131 e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
# Members of a fake History object (class header elided in this excerpt).
# Class-level lists hold exactly one status object of each kind, shared by
# all instances; the list_all_* accessors mirror the real History interface.
137 _all_upload_status = [upload.UploadStatus()]
138 _all_download_status = [build_one_ds()]
139 _all_mapupdate_statuses = [servermap.UpdateStatus()]
140 _all_publish_statuses = [publish.PublishStatus()]
141 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
143 def list_all_upload_statuses(self):
144 return self._all_upload_status
145 def list_all_download_statuses(self):
146 return self._all_download_status
147 def list_all_mapupdate_statuses(self):
148 return self._all_mapupdate_statuses
149 def list_all_publish_statuses(self):
150 return self._all_publish_statuses
151 def list_all_retrieve_statuses(self):
152 return self._all_retrieve_statuses
153 def list_all_helper_statuses(self):
# Client subclass wired entirely with fakes: fake stats, history, uploader,
# and nodemaker, plus an empty StorageFarmBroker. Deliberately skips
# Client.__init__ so no real node/network state is created.
156 class FakeClient(Client):
158 # don't upcall to Client.__init__, since we only want to initialize a
160 service.MultiService.__init__(self)
161 self.nodeid = "fake_nodeid"
162 self.nickname = "fake_nickname"
163 self.introducer_furl = "None"
164 self.stats_provider = FakeStatsProvider()
165 self._secret_holder = SecretHolder("lease secret", "convergence secret")
167 self.convergence = "some random string"
# permute_peers=True matches the real client's server-selection behavior.
168 self.storage_broker = StorageFarmBroker(None, permute_peers=True)
169 self.introducer_client = None
170 self.history = FakeHistory()
171 self.uploader = FakeUploader()
172 self.uploader.setServiceParent(self)
173 self.blacklist = None
174 self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
177 self.mutable_file_default = SDMF_VERSION
# start/stopService bypass Client's overrides and use the plain
# MultiService lifecycle, keeping the fakes inert.
179 def startService(self):
180 return service.MultiService.startService(self)
181 def stopService(self):
182 return service.MultiService.stopService(self)
# Module-level alias so tests can reference the fake node's size limit directly.
184 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
# Mixin that builds a complete in-process web-API fixture: a FakeClient, a
# webish server on an ephemeral port, and a small directory tree of fake
# files under /uri. NOTE(review): this excerpt elides several lines
# (the setUp def line, intermediate callbacks, and some tree-comment lines).
186 class WebMixin(object):
188 self.s = FakeClient()
189 self.s.startService()
190 self.staticdir = self.mktemp()
192 self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
194 self.ws.setServiceParent(self.s)
195 self.webish_port = self.ws.getPortnum()
196 self.webish_url = self.ws.getURL()
197 assert self.webish_url.endswith("/")
198 self.webish_url = self.webish_url[:-1] # these tests add their own /
# Six fake dirnodes: public root, private root, foo, empty, sub, reedownlee.
200 l = [ self.s.create_dirnode() for x in range(6) ]
201 d = defer.DeferredList(l)
203 self.public_root = res[0][1]
204 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
205 self.public_url = "/uri/" + self.public_root.get_uri()
206 self.private_root = res[1][1]
210 self._foo_uri = foo.get_uri()
211 self._foo_readonly_uri = foo.get_readonly_uri()
212 self._foo_verifycap = foo.get_verify_cap().to_string()
213 # NOTE: we ignore the deferred on all set_uri() calls, because we
214 # know the fake nodes do these synchronously
215 self.public_root.set_uri(u"foo", foo.get_uri(),
216 foo.get_readonly_uri())
218 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
219 foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
220 self._bar_txt_verifycap = n.get_verify_cap().to_string()
223 # XXX: Do we ever use this?
224 self.BAZ_CONTENTS, n, self._baz_txt_uri, self._baz_txt_readonly_uri = self.makefile_mutable(0)
226 foo.set_uri(u"baz.txt", self._baz_txt_uri, self._baz_txt_readonly_uri)
# quux.txt is the MDMF mutable-file fixture.
229 self.QUUX_CONTENTS, n, self._quux_txt_uri, self._quux_txt_readonly_uri = self.makefile_mutable(0, mdmf=True)
230 assert self._quux_txt_uri.startswith("URI:MDMF")
231 foo.set_uri(u"quux.txt", self._quux_txt_uri, self._quux_txt_readonly_uri)
233 foo.set_uri(u"empty", res[3][1].get_uri(),
234 res[3][1].get_readonly_uri())
235 sub_uri = res[4][1].get_uri()
236 self._sub_uri = sub_uri
237 foo.set_uri(u"sub", sub_uri, sub_uri)
238 sub = self.s.create_node_from_uri(sub_uri)
240 _ign, n, blocking_uri = self.makefile(1)
241 foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)
243 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
244 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
245 # still think of it as an umlaut
246 foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
248 _ign, n, baz_file = self.makefile(2)
249 self._baz_file_uri = baz_file
250 sub.set_uri(u"baz.txt", baz_file, baz_file)
252 _ign, n, self._bad_file_uri = self.makefile(3)
253 # this uri should not be downloadable
# deleting the shares from the fake store makes downloads of this cap fail
254 del FakeCHKFileNode.all_contents[self._bad_file_uri]
257 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
258 rodir.get_readonly_uri())
259 rodir.set_uri(u"nor", baz_file, baz_file)
265 # public/foo/quux.txt
266 # public/foo/blockingfile
269 # public/foo/sub/baz.txt
271 # public/reedownlee/nor
272 self.NEWFILE_CONTENTS = "newfile contents\n"
# cache bar.txt's link metadata for later comparisons in JSON checks
274 return foo.get_metadata_for(u"bar.txt")
276 def _got_metadata(metadata):
277 self._bar_txt_metadata = metadata
278 d.addCallback(_got_metadata)
def makefile(self, number):
    # Fabricate an immutable fake CHK file whose contents embed *number*.
    # Returns a (contents, filenode, read-cap URI) triple.
    data = "contents of file %s\n" % number
    filenode = create_chk_filenode(data)
    return (data, filenode, filenode.get_uri())
def makefile_mutable(self, number, mdmf=False):
    # Fabricate a fake mutable file (SDMF by default, MDMF when mdmf=True).
    # Returns (contents, filenode, write-cap URI, read-cap URI).
    data = "contents of mutable file %s\n" % number
    filenode = create_mutable_filenode(data, mdmf)
    return (data, filenode, filenode.get_uri(), filenode.get_readonly_uri())
# Tear-down body (def line elided in this excerpt): stop the fake client
# service, which also stops the webish server parented under it.
292 return self.s.stopService()
# Content assertions: each checks that a downloaded body equals the fixture
# contents captured in setUp; the trailing `res` is the failure message.
294 def failUnlessIsBarDotTxt(self, res):
295 self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
297 def failUnlessIsQuuxDotTxt(self, res):
298 self.failUnlessReallyEqual(res, self.QUUX_CONTENTS, res)
300 def failUnlessIsBazDotTxt(self, res):
301 self.failUnlessReallyEqual(res, self.BAZ_CONTENTS, res)
def failUnlessIsBarJSON(self, res):
    # Assert that *res* is the t=json representation of the immutable
    # bar.txt fixture: a ["filenode", {...}] pair with no write-cap.
    parsed = simplejson.loads(res)
    self.failUnless(isinstance(parsed, list))
    self.failUnlessEqual(parsed[0], "filenode")
    self.failUnless(isinstance(parsed[1], dict))
    info = parsed[1]
    self.failIf(info["mutable"])
    self.failIfIn("rw_uri", info) # immutable
    self.failUnlessReallyEqual(to_str(info["ro_uri"]), self._bar_txt_uri)
    self.failUnlessReallyEqual(to_str(info["verify_uri"]), self._bar_txt_verifycap)
    self.failUnlessReallyEqual(info["size"], len(self.BAR_CONTENTS))
# JSON assertions for the mutable quux.txt fixture. NOTE(review): this
# excerpt elides the line binding `metadata` in failUnlessIsQuuxJSON and the
# `if readonly:` / `else:` lines around the rw_uri checks below.
314 def failUnlessIsQuuxJSON(self, res, readonly=False):
315 data = simplejson.loads(res)
316 self.failUnless(isinstance(data, list))
317 self.failUnlessEqual(data[0], "filenode")
318 self.failUnless(isinstance(data[1], dict))
320 return self.failUnlessIsQuuxDotTxtMetadata(metadata, readonly)
322 def failUnlessIsQuuxDotTxtMetadata(self, metadata, readonly):
323 self.failUnless(metadata['mutable'])
# read-only views must not expose a write-cap; writable views must
325 self.failIfIn("rw_uri", metadata)
327 self.failUnlessIn("rw_uri", metadata)
328 self.failUnlessEqual(metadata['rw_uri'], self._quux_txt_uri)
329 self.failUnlessIn("ro_uri", metadata)
330 self.failUnlessEqual(metadata['ro_uri'], self._quux_txt_readonly_uri)
331 self.failUnlessReallyEqual(metadata['size'], len(self.QUUX_CONTENTS))
# Assert that *res* is the t=json listing of the "foo" directory fixture:
# a ["dirnode", {...}] pair with the expected caps and children. Child names
# are normalized to unicode because simplejson may return str for ASCII keys.
# NOTE(review): a few continuation lines are elided in this excerpt.
333 def failUnlessIsFooJSON(self, res):
334 data = simplejson.loads(res)
335 self.failUnless(isinstance(data, list))
336 self.failUnlessEqual(data[0], "dirnode", res)
337 self.failUnless(isinstance(data[1], dict))
338 self.failUnless(data[1]["mutable"])
339 self.failUnlessIn("rw_uri", data[1]) # mutable
340 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
341 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
342 self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
344 kidnames = sorted([unicode(n) for n in data[1]["children"]])
345 self.failUnlessEqual(kidnames,
346 [u"bar.txt", u"baz.txt", u"blockingfile",
347 u"empty", u"n\u00fc.txt", u"quux.txt", u"sub"])
348 kids = dict( [(unicode(name),value)
350 in data[1]["children"].iteritems()] )
351 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
352 self.failUnlessIn("metadata", kids[u"sub"][1])
353 self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
354 tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
355 self.failUnlessIn("linkcrtime", tahoe_md)
356 self.failUnlessIn("linkmotime", tahoe_md)
357 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
358 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
359 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
360 self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
361 self._bar_txt_verifycap)
362 self.failUnlessIn("metadata", kids[u"bar.txt"][1])
363 self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
364 self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
365 self._bar_txt_metadata["tahoe"]["linkcrtime"])
366 self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
368 self.failUnlessIn("quux.txt", kids)
369 self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["rw_uri"]),
371 self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["ro_uri"]),
372 self._quux_txt_readonly_uri)
# HTTP convenience wrappers used by every test. All return Deferreds.
# NOTE(review): several lines are elided in this excerpt (the `_got_data`
# def lines in GET/HEAD, the form/headers initialization in POST).
374 def GET(self, urlpath, followRedirect=False, return_response=False,
376 # if return_response=True, this fires with (data, statuscode,
377 # respheaders) instead of just data.
378 assert not isinstance(urlpath, unicode)
379 url = self.webish_url + urlpath
380 factory = HTTPClientGETFactory(url, method="GET",
381 followRedirect=followRedirect, **kwargs)
382 reactor.connectTCP("localhost", self.webish_port, factory)
385 return (data, factory.status, factory.response_headers)
387 d.addCallback(_got_data)
388 return factory.deferred
390 def HEAD(self, urlpath, return_response=False, **kwargs):
391 # this requires some surgery, because twisted.web.client doesn't want
392 # to give us back the response headers.
# NOTE(review): the factory is handed `urlpath` rather than a full URL
# built from self.webish_url — possibly an elided line built `url`; confirm.
393 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
394 reactor.connectTCP("localhost", self.webish_port, factory)
397 return (data, factory.status, factory.response_headers)
399 d.addCallback(_got_data)
400 return factory.deferred
402 def PUT(self, urlpath, data, **kwargs):
403 url = self.webish_url + urlpath
404 return client.getPage(url, method="PUT", postdata=data, **kwargs)
406 def DELETE(self, urlpath):
407 url = self.webish_url + urlpath
408 return client.getPage(url, method="DELETE")
# Build a multipart/form-data body by hand; tuple values are treated as
# (filename, contents) file uploads, unicode values are UTF-8 encoded.
410 def POST(self, urlpath, followRedirect=False, **fields):
411 sepbase = "boogabooga"
415 form.append('Content-Disposition: form-data; name="_charset"')
419 for name, value in fields.iteritems():
420 if isinstance(value, tuple):
421 filename, value = value
422 form.append('Content-Disposition: form-data; name="%s"; '
423 'filename="%s"' % (name, filename.encode("utf-8")))
425 form.append('Content-Disposition: form-data; name="%s"' % name)
427 if isinstance(value, unicode):
428 value = value.encode("utf-8")
431 assert isinstance(value, str)
438 body = "\r\n".join(form) + "\r\n"
439 headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
440 return self.POST2(urlpath, body, headers, followRedirect)
# NOTE(review): headers={} is a mutable default argument; it is only
# passed through here, but mutation by a callee would leak across calls.
442 def POST2(self, urlpath, body="", headers={}, followRedirect=False):
443 url = self.webish_url + urlpath
444 return client.getPage(url, method="POST", postdata=body,
445 headers=headers, followRedirect=followRedirect)
# Failure-expectation helpers. shouldFail is an errback-style checker for a
# result already in hand; shouldFail2 invokes *callable* itself via
# maybeDeferred. Both verify the failure type, an optional substring of the
# failure text, and an optional substring of the HTTP response body.
# NOTE(review): the `if substring:` / `else:` framing lines are elided here.
447 def shouldFail(self, res, expected_failure, which,
448 substring=None, response_substring=None):
449 if isinstance(res, failure.Failure):
450 res.trap(expected_failure)
452 self.failUnlessIn(substring, str(res), which)
453 if response_substring:
454 self.failUnlessIn(response_substring, res.value.response, which)
456 self.fail("%s was supposed to raise %s, not get '%s'" %
457 (which, expected_failure, res))
459 def shouldFail2(self, expected_failure, which, substring,
461 callable, *args, **kwargs):
462 assert substring is None or isinstance(substring, str)
463 assert response_substring is None or isinstance(response_substring, str)
464 d = defer.maybeDeferred(callable, *args, **kwargs)
466 if isinstance(res, failure.Failure):
467 res.trap(expected_failure)
469 self.failUnlessIn(substring, str(res), which)
470 if response_substring:
471 self.failUnlessIn(response_substring, res.value.response, which)
473 self.fail("%s was supposed to raise %s, not get '%s'" %
474 (which, expected_failure, res))
# addBoth-style helpers: assert that *res* is a twisted.web error.Error with
# the given HTTP status (note: .status is a string). Non-failure results
# trigger self.fail. NOTE(review): the else: lines are elided in this excerpt.
478 def should404(self, res, which):
479 if isinstance(res, failure.Failure):
480 res.trap(error.Error)
481 self.failUnlessReallyEqual(res.value.status, "404")
483 self.fail("%s was supposed to Error(404), not get '%s'" %
486 def should302(self, res, which):
487 if isinstance(res, failure.Failure):
488 res.trap(error.Error)
489 self.failUnlessReallyEqual(res.value.status, "302")
491 self.fail("%s was supposed to Error(302), not get '%s'" %
# Main web-API test case: combines the WebMixin fixture with error/stall
# helpers on top of trial's TestCase.
495 class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
496 def test_create(self):
# Fetch the root page and check for the welcome banner; also creates the
# node's basedir on disk. NOTE(review): intermediate lines are elided here.
499 def test_welcome(self):
502 self.failUnlessIn('Welcome To Tahoe-LAFS', res)
504 self.s.basedir = 'web/test_welcome'
505 fileutil.make_dirs("web/test_welcome")
506 fileutil.make_dirs("web/test_welcome/private")
508 d.addCallback(_check)
# Exercise the /provisioning page: an empty GET, then three POSTs with
# progressively larger deployments, checking the rendered space estimates.
# NOTE(review): several form fields and callback headers are elided here.
511 def test_provisioning(self):
512 d = self.GET("/provisioning/")
514 self.failUnlessIn('Provisioning Tool', res)
516 fields = {'filled': True,
517 "num_users": int(50e3),
518 "files_per_user": 1000,
519 "space_per_user": int(1e9),
520 "sharing_ratio": 1.0,
521 "encoding_parameters": "3-of-10-5",
523 "ownership_mode": "A",
524 "download_rate": 100,
529 return self.POST("/provisioning/", **fields)
531 d.addCallback(_check)
533 self.failUnlessIn('Provisioning Tool', res)
534 self.failUnlessIn("Share space consumed: 167.01TB", res)
536 fields = {'filled': True,
537 "num_users": int(50e6),
538 "files_per_user": 1000,
539 "space_per_user": int(5e9),
540 "sharing_ratio": 1.0,
541 "encoding_parameters": "25-of-100-50",
542 "num_servers": 30000,
543 "ownership_mode": "E",
544 "drive_failure_model": "U",
546 "download_rate": 1000,
551 return self.POST("/provisioning/", **fields)
552 d.addCallback(_check2)
554 self.failUnlessIn("Share space consumed: huge!", res)
555 fields = {'filled': True}
556 return self.POST("/provisioning/", **fields)
557 d.addCallback(_check3)
559 self.failUnlessIn("Share space consumed:", res)
560 d.addCallback(_check4)
# Exercise the /reliability page; skipped when the reliability module (which
# needs NumPy) cannot be imported. The regex pins an expected loss
# probability in the rendered output.
563 def test_reliability_tool(self):
565 from allmydata import reliability
566 _hush_pyflakes = reliability
569 raise unittest.SkipTest("reliability tool requires NumPy")
571 d = self.GET("/reliability/")
573 self.failUnlessIn('Reliability Tool', res)
574 fields = {'drive_lifetime': "8Y",
579 "check_period": "1M",
580 "report_period": "3M",
583 return self.POST("/reliability/", **fields)
585 d.addCallback(_check)
587 self.failUnlessIn('Reliability Tool', res)
588 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
589 self.failUnless(re.search(r, res), res)
590 d.addCallback(_check2)
# Walk the /status pages for every operation type seeded by FakeHistory:
# the overview (HTML and JSON), then each per-operation page, including the
# download event_json whose contents were arranged by the DownloadStatus
# builder above. NOTE(review): some callback def lines are elided here.
593 def test_status(self):
594 h = self.s.get_history()
595 dl_num = h.list_all_download_statuses()[0].get_counter()
596 ul_num = h.list_all_upload_statuses()[0].get_counter()
597 mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
598 pub_num = h.list_all_publish_statuses()[0].get_counter()
599 ret_num = h.list_all_retrieve_statuses()[0].get_counter()
600 d = self.GET("/status", followRedirect=True)
602 self.failUnlessIn('Upload and Download Status', res)
603 self.failUnlessIn('"down-%d"' % dl_num, res)
604 self.failUnlessIn('"up-%d"' % ul_num, res)
605 self.failUnlessIn('"mapupdate-%d"' % mu_num, res)
606 self.failUnlessIn('"publish-%d"' % pub_num, res)
607 self.failUnlessIn('"retrieve-%d"' % ret_num, res)
608 d.addCallback(_check)
609 d.addCallback(lambda res: self.GET("/status/?t=json"))
610 def _check_json(res):
611 data = simplejson.loads(res)
612 self.failUnless(isinstance(data, dict))
613 #active = data["active"]
614 # TODO: test more. We need a way to fake an active operation
616 d.addCallback(_check_json)
618 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
620 self.failUnlessIn("File Download Status", res)
621 d.addCallback(_check_dl)
622 d.addCallback(lambda res: self.GET("/status/down-%d/event_json" % dl_num))
623 def _check_dl_json(res):
624 data = simplejson.loads(res)
625 self.failUnless(isinstance(data, dict))
626 self.failUnlessIn("read", data)
627 self.failUnlessEqual(data["read"][0]["length"], 120)
628 self.failUnlessEqual(data["segment"][0]["segment_length"], 100)
629 self.failUnlessEqual(data["segment"][2]["segment_number"], 2)
630 self.failUnlessEqual(data["segment"][2]["finish_time"], None)
# expected abbreviated server names derive from the same tagged hashes
# used when building the DownloadStatus fixture
631 phwr_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_a")[:20])
632 cmpu_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_b")[:20])
633 # serverids[] keys are strings, since that's what JSON does, but
634 # we'd really like them to be ints
635 self.failUnlessEqual(data["serverids"]["0"], "phwr")
636 self.failUnless(data["serverids"].has_key("1"), data["serverids"])
637 self.failUnlessEqual(data["serverids"]["1"], "cmpu", data["serverids"])
638 self.failUnlessEqual(data["server_info"][phwr_id]["short"], "phwr")
639 self.failUnlessEqual(data["server_info"][cmpu_id]["short"], "cmpu")
640 self.failUnlessIn("dyhb", data)
641 self.failUnlessIn("misc", data)
642 d.addCallback(_check_dl_json)
643 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
645 self.failUnlessIn("File Upload Status", res)
646 d.addCallback(_check_ul)
647 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
648 def _check_mapupdate(res):
649 self.failUnlessIn("Mutable File Servermap Update Status", res)
650 d.addCallback(_check_mapupdate)
651 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
652 def _check_publish(res):
653 self.failUnlessIn("Mutable File Publish Status", res)
654 d.addCallback(_check_publish)
655 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
656 def _check_retrieve(res):
657 self.failUnlessIn("Mutable File Retrieve Status", res)
658 d.addCallback(_check_retrieve)
def test_status_numbers(self):
    # Both renderer mixins must format durations and transfer rates
    # identically, so drive them from shared (input, expected) tables.
    time_cases = [(None, ""),
                  (2.5, "2.50s"),
                  (0.25, "250ms"),
                  (0.0021, "2.1ms"),
                  (0.000123, "123us")]
    rate_cases = [(None, ""),
                  (2500000, "2.50MBps"),
                  (30100, "30.1kBps"),
                  (123, "123Bps")]
    for renderer in (status.DownloadResultsRendererMixin(),
                     status.UploadResultsRendererMixin()):
        for value, expected in time_cases:
            self.failUnlessReallyEqual(renderer.render_time(None, value),
                                       expected)
        for value, expected in rate_cases:
            self.failUnlessReallyEqual(renderer.render_rate(None, value),
                                       expected)
# Plain GET of an immutable file by path; body must equal the fixture.
685 def test_GET_FILEURL(self):
686 d = self.GET(self.public_url + "/foo/bar.txt")
687 d.addCallback(self.failUnlessIsBarDotTxt)
# HTTP Range-header coverage for GET and HEAD on an immutable file:
# closed ranges, open-ended ("5-"), suffix ("-5"), unsatisfiable overruns
# (expect 416), and a malformed Range unit (expect plain 200 with full
# body). NOTE(review): the addCallback/return-d tails of these methods are
# elided in this excerpt, and some shouldFail2 `which` strings do not match
# the enclosing method name (copy-paste residue).
690 def test_GET_FILEURL_range(self):
691 headers = {"range": "bytes=1-10"}
692 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
693 return_response=True)
694 def _got((res, status, headers)):
695 self.failUnlessReallyEqual(int(status), 206)
696 self.failUnless(headers.has_key("content-range"))
697 self.failUnlessReallyEqual(headers["content-range"][0],
698 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
699 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
703 def test_GET_FILEURL_partial_range(self):
704 headers = {"range": "bytes=5-"}
705 length = len(self.BAR_CONTENTS)
706 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
707 return_response=True)
708 def _got((res, status, headers)):
709 self.failUnlessReallyEqual(int(status), 206)
710 self.failUnless(headers.has_key("content-range"))
711 self.failUnlessReallyEqual(headers["content-range"][0],
712 "bytes 5-%d/%d" % (length-1, length))
713 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
717 def test_GET_FILEURL_partial_end_range(self):
718 headers = {"range": "bytes=-5"}
719 length = len(self.BAR_CONTENTS)
720 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
721 return_response=True)
722 def _got((res, status, headers)):
723 self.failUnlessReallyEqual(int(status), 206)
724 self.failUnless(headers.has_key("content-range"))
725 self.failUnlessReallyEqual(headers["content-range"][0],
726 "bytes %d-%d/%d" % (length-5, length-1, length))
727 self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
731 def test_GET_FILEURL_partial_range_overrun(self):
732 headers = {"range": "bytes=100-200"}
733 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
734 "416 Requested Range not satisfiable",
735 "First beyond end of file",
736 self.GET, self.public_url + "/foo/bar.txt",
740 def test_HEAD_FILEURL_range(self):
741 headers = {"range": "bytes=1-10"}
742 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
743 return_response=True)
744 def _got((res, status, headers)):
# HEAD must return the range metadata but an empty body
745 self.failUnlessReallyEqual(res, "")
746 self.failUnlessReallyEqual(int(status), 206)
747 self.failUnless(headers.has_key("content-range"))
748 self.failUnlessReallyEqual(headers["content-range"][0],
749 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
753 def test_HEAD_FILEURL_partial_range(self):
754 headers = {"range": "bytes=5-"}
755 length = len(self.BAR_CONTENTS)
756 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
757 return_response=True)
758 def _got((res, status, headers)):
759 self.failUnlessReallyEqual(int(status), 206)
760 self.failUnless(headers.has_key("content-range"))
761 self.failUnlessReallyEqual(headers["content-range"][0],
762 "bytes 5-%d/%d" % (length-1, length))
766 def test_HEAD_FILEURL_partial_end_range(self):
767 headers = {"range": "bytes=-5"}
768 length = len(self.BAR_CONTENTS)
769 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
770 return_response=True)
771 def _got((res, status, headers)):
772 self.failUnlessReallyEqual(int(status), 206)
773 self.failUnless(headers.has_key("content-range"))
774 self.failUnlessReallyEqual(headers["content-range"][0],
775 "bytes %d-%d/%d" % (length-5, length-1, length))
779 def test_HEAD_FILEURL_partial_range_overrun(self):
780 headers = {"range": "bytes=100-200"}
781 d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
782 "416 Requested Range not satisfiable",
784 self.HEAD, self.public_url + "/foo/bar.txt",
788 def test_GET_FILEURL_range_bad(self):
789 headers = {"range": "BOGUS=fizbop-quarnak"}
790 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
791 return_response=True)
792 def _got((res, status, headers)):
# an unparseable Range unit is ignored: full body, no content-range
793 self.failUnlessReallyEqual(int(status), 200)
794 self.failUnless(not headers.has_key("content-range"))
795 self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
799 def test_HEAD_FILEURL(self):
800 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
801 def _got((res, status, headers)):
802 self.failUnlessReallyEqual(res, "")
803 self.failUnlessReallyEqual(headers["content-length"][0],
804 str(len(self.BAR_CONTENTS)))
805 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
# /file/CAP and /named/CAP URL forms: the trailing path segment only names
# the download (including @@name= and ignored intermediate segments), and
# ?save=true&filename= variants (ASCII and UTF-8-encoded unicode) must all
# deliver the same bar.txt contents.
809 def test_GET_FILEURL_named(self):
810 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
811 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
812 d = self.GET(base + "/@@name=/blah.txt")
813 d.addCallback(self.failUnlessIsBarDotTxt)
814 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
815 d.addCallback(self.failUnlessIsBarDotTxt)
816 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
817 d.addCallback(self.failUnlessIsBarDotTxt)
818 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
819 d.addCallback(self.failUnlessIsBarDotTxt)
820 save_url = base + "?save=true&filename=blah.txt"
821 d.addCallback(lambda res: self.GET(save_url))
822 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
823 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
824 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
825 u_url = base + "?save=true&filename=" + u_fn_e
826 d.addCallback(lambda res: self.GET(u_url))
827 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
# Error-path and cap-form coverage for /file and /uri: disallowed methods,
# verify-caps (unhandled by create_node_from_uri), bogus child paths,
# MDMF/SDMF reads and writes, read-only-cap write rejection, save=true
# headers, and t=info pages. NOTE(review): the `d = self.GET(base)` lines,
# some shouldFail2 arguments, and the `return d` tails are elided in this
# excerpt.
830 def test_PUT_FILEURL_named_bad(self):
831 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
832 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
834 "/file can only be used with GET or HEAD",
835 self.PUT, base + "/@@name=/blah.txt", "")
# NOTE(review): `which` says "test_PUT_DIRURL_named_bad" but the method is
# test_GET_DIRURL_named_bad — label-only mismatch, affects failure text.
839 def test_GET_DIRURL_named_bad(self):
840 base = "/file/%s" % urllib.quote(self._foo_uri)
841 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
844 self.GET, base + "/@@name=/blah.txt")
847 def test_GET_slash_file_bad(self):
848 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
850 "/file must be followed by a file-cap and a name",
854 def test_GET_unhandled_URI_named(self):
855 contents, n, newuri = self.makefile(12)
856 verifier_cap = n.get_verify_cap().to_string()
857 base = "/file/%s" % urllib.quote(verifier_cap)
858 # client.create_node_from_uri() can't handle verify-caps
859 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
860 "400 Bad Request", "is not a file-cap",
864 def test_GET_unhandled_URI(self):
865 contents, n, newuri = self.makefile(12)
866 verifier_cap = n.get_verify_cap().to_string()
867 base = "/uri/%s" % urllib.quote(verifier_cap)
868 # client.create_node_from_uri() can't handle verify-caps
869 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
871 "GET unknown URI type: can only do t=info",
875 def test_GET_FILE_URI(self):
876 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
878 d.addCallback(self.failUnlessIsBarDotTxt)
881 def test_GET_FILE_URI_mdmf(self):
882 base = "/uri/%s" % urllib.quote(self._quux_txt_uri)
884 d.addCallback(self.failUnlessIsQuuxDotTxt)
887 def test_GET_FILE_URI_mdmf_extensions(self):
# extension fields appended after ':' on an MDMF cap must be tolerated
888 base = "/uri/%s" % urllib.quote("%s:RANDOMSTUFF" % self._quux_txt_uri)
890 d.addCallback(self.failUnlessIsQuuxDotTxt)
893 def test_GET_FILE_URI_mdmf_readonly(self):
894 base = "/uri/%s" % urllib.quote(self._quux_txt_readonly_uri)
896 d.addCallback(self.failUnlessIsQuuxDotTxt)
899 def test_GET_FILE_URI_badchild(self):
900 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
901 errmsg = "Files have no children, certainly not named 'boguschild'"
902 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
903 "400 Bad Request", errmsg,
907 def test_PUT_FILE_URI_badchild(self):
908 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
909 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
# NOTE(review): `which` below says "test_GET_FILE_URI_badchild" inside
# test_PUT_FILE_URI_badchild — copy-paste label, affects failure text only.
910 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
911 "400 Bad Request", errmsg,
915 def test_PUT_FILE_URI_mdmf(self):
916 base = "/uri/%s" % urllib.quote(self._quux_txt_uri)
917 self._quux_new_contents = "new_contents"
919 d.addCallback(lambda res:
920 self.failUnlessIsQuuxDotTxt(res))
921 d.addCallback(lambda ignored:
922 self.PUT(base, self._quux_new_contents))
923 d.addCallback(lambda ignored:
925 d.addCallback(lambda res:
926 self.failUnlessReallyEqual(res, self._quux_new_contents))
929 def test_PUT_FILE_URI_mdmf_extensions(self):
930 base = "/uri/%s" % urllib.quote("%s:EXTENSIONSTUFF" % self._quux_txt_uri)
931 self._quux_new_contents = "new_contents"
933 d.addCallback(lambda res: self.failUnlessIsQuuxDotTxt(res))
934 d.addCallback(lambda ignored: self.PUT(base, self._quux_new_contents))
935 d.addCallback(lambda ignored: self.GET(base))
936 d.addCallback(lambda res: self.failUnlessEqual(self._quux_new_contents,
940 def test_PUT_FILE_URI_mdmf_readonly(self):
941 # We're not allowed to PUT things to a readonly cap.
942 base = "/uri/%s" % self._quux_txt_readonly_uri
944 d.addCallback(lambda res:
945 self.failUnlessIsQuuxDotTxt(res))
946 # What should we get here? We get a 500 error now; that's not right.
947 d.addCallback(lambda ignored:
948 self.shouldFail2(error.Error, "test_PUT_FILE_URI_mdmf_readonly",
949 "400 Bad Request", "read-only cap",
950 self.PUT, base, "new data"))
953 def test_PUT_FILE_URI_sdmf_readonly(self):
954 # We're not allowed to put things to a readonly cap.
955 base = "/uri/%s" % self._baz_txt_readonly_uri
957 d.addCallback(lambda res:
958 self.failUnlessIsBazDotTxt(res))
959 d.addCallback(lambda ignored:
960 self.shouldFail2(error.Error, "test_PUT_FILE_URI_sdmf_readonly",
961 "400 Bad Request", "read-only cap",
962 self.PUT, base, "new_data"))
965 # TODO: version of this with a Unicode filename
966 def test_GET_FILEURL_save(self):
967 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
968 return_response=True)
969 def _got((res, statuscode, headers)):
970 content_disposition = headers["content-disposition"][0]
971 self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
972 self.failUnlessIsBarDotTxt(res)
976 def test_GET_FILEURL_missing(self):
977 d = self.GET(self.public_url + "/foo/missing")
978 d.addBoth(self.should404, "test_GET_FILEURL_missing")
981 def test_GET_FILEURL_info_mdmf(self):
982 d = self.GET("/uri/%s?t=info" % self._quux_txt_uri)
984 self.failUnlessIn("mutable file (mdmf)", res)
985 self.failUnlessIn(self._quux_txt_uri, res)
986 self.failUnlessIn(self._quux_txt_readonly_uri, res)
990 def test_GET_FILEURL_info_mdmf_readonly(self):
991 d = self.GET("/uri/%s?t=info" % self._quux_txt_readonly_uri)
993 self.failUnlessIn("mutable file (mdmf)", res)
994 self.failIfIn(self._quux_txt_uri, res)
995 self.failUnlessIn(self._quux_txt_readonly_uri, res)
999 def test_GET_FILEURL_info_sdmf(self):
1000 d = self.GET("/uri/%s?t=info" % self._baz_txt_uri)
1002 self.failUnlessIn("mutable file (sdmf)", res)
1003 self.failUnlessIn(self._baz_txt_uri, res)
1007 def test_GET_FILEURL_info_mdmf_extensions(self):
1008 d = self.GET("/uri/%s:STUFF?t=info" % self._quux_txt_uri)
1010 self.failUnlessIn("mutable file (mdmf)", res)
1011 self.failUnlessIn(self._quux_txt_uri, res)
1012 self.failUnlessIn(self._quux_txt_readonly_uri, res)
1016 def test_PUT_overwrite_only_files(self):
1017 # create a directory, put a file in that directory.
1018 contents, n, filecap = self.makefile(8)
1019 d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
1020 d.addCallback(lambda res:
1021 self.PUT(self.public_url + "/foo/dir/file1.txt",
1022 self.NEWFILE_CONTENTS))
1023 # try to overwrite the file with replace=only-files
1024 # (this should work)
1025 d.addCallback(lambda res:
1026 self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
1028 d.addCallback(lambda res:
1029 self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
1030 "There was already a child by that name, and you asked me "
1031 "to not replace it",
1032 self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
1036 def test_PUT_NEWFILEURL(self):
1037 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
1038 # TODO: we lose the response code, so we can't check this
1039 #self.failUnlessReallyEqual(responsecode, 201)
1040 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
1041 d.addCallback(lambda res:
1042 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
1043 self.NEWFILE_CONTENTS))
1046 def test_PUT_NEWFILEURL_not_mutable(self):
1047 d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
1048 self.NEWFILE_CONTENTS)
1049 # TODO: we lose the response code, so we can't check this
1050 #self.failUnlessReallyEqual(responsecode, 201)
1051 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
1052 d.addCallback(lambda res:
1053 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
1054 self.NEWFILE_CONTENTS))
1057 def test_PUT_NEWFILEURL_unlinked_mdmf(self):
1058 # this should get us a few segments of an MDMF mutable file,
1059 # which we can then test for.
1060 contents = self.NEWFILE_CONTENTS * 300000
1061 d = self.PUT("/uri?format=mdmf",
1063 def _got_filecap(filecap):
1064 self.failUnless(filecap.startswith("URI:MDMF"))
1066 d.addCallback(_got_filecap)
1067 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1068 d.addCallback(lambda json: self.failUnlessIn("MDMF", json))
1071 def test_PUT_NEWFILEURL_unlinked_sdmf(self):
1072 contents = self.NEWFILE_CONTENTS * 300000
1073 d = self.PUT("/uri?format=sdmf",
1075 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1076 d.addCallback(lambda json: self.failUnlessIn("SDMF", json))
1079 def test_PUT_NEWFILEURL_unlinked_bad_format(self):
1080 contents = self.NEWFILE_CONTENTS * 300000
1081 return self.shouldHTTPError("PUT_NEWFILEURL_unlinked_bad_format",
1082 400, "Bad Request", "Unknown format: foo",
1083 self.PUT, "/uri?format=foo",
1086 def test_PUT_NEWFILEURL_range_bad(self):
1087 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
1088 target = self.public_url + "/foo/new.txt"
1089 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
1090 "501 Not Implemented",
1091 "Content-Range in PUT not yet supported",
1092 # (and certainly not for immutable files)
1093 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
1095 d.addCallback(lambda res:
1096 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
1099 def test_PUT_NEWFILEURL_mutable(self):
1100 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
1101 self.NEWFILE_CONTENTS)
1102 # TODO: we lose the response code, so we can't check this
1103 #self.failUnlessReallyEqual(responsecode, 201)
1104 def _check_uri(res):
1105 u = uri.from_string_mutable_filenode(res)
1106 self.failUnless(u.is_mutable())
1107 self.failIf(u.is_readonly())
1109 d.addCallback(_check_uri)
1110 d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
1111 d.addCallback(lambda res:
1112 self.failUnlessMutableChildContentsAre(self._foo_node,
1114 self.NEWFILE_CONTENTS))
1117 def test_PUT_NEWFILEURL_mutable_toobig(self):
1118 # It is okay to upload large mutable files, so we should be able
1120 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
1121 "b" * (self.s.MUTABLE_SIZELIMIT + 1))
1124 def test_PUT_NEWFILEURL_replace(self):
1125 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
1126 # TODO: we lose the response code, so we can't check this
1127 #self.failUnlessReallyEqual(responsecode, 200)
1128 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
1129 d.addCallback(lambda res:
1130 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
1131 self.NEWFILE_CONTENTS))
1134 def test_PUT_NEWFILEURL_bad_t(self):
1135 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
1136 "PUT to a file: bad t=bogus",
1137 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
1141 def test_PUT_NEWFILEURL_no_replace(self):
1142 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
1143 self.NEWFILE_CONTENTS)
1144 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
1146 "There was already a child by that name, and you asked me "
1147 "to not replace it")
1150 def test_PUT_NEWFILEURL_mkdirs(self):
1151 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
1153 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
1154 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
1155 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
1156 d.addCallback(lambda res:
1157 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
1158 self.NEWFILE_CONTENTS))
1161 def test_PUT_NEWFILEURL_blocked(self):
1162 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
1163 self.NEWFILE_CONTENTS)
1164 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
1166 "Unable to create directory 'blockingfile': a file was in the way")
1169 def test_PUT_NEWFILEURL_emptyname(self):
1170 # an empty pathname component (i.e. a double-slash) is disallowed
1171 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
1173 "The webapi does not allow empty pathname components",
1174 self.PUT, self.public_url + "/foo//new.txt", "")
1177 def test_DELETE_FILEURL(self):
1178 d = self.DELETE(self.public_url + "/foo/bar.txt")
1179 d.addCallback(lambda res:
1180 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
1183 def test_DELETE_FILEURL_missing(self):
1184 d = self.DELETE(self.public_url + "/foo/missing")
1185 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
1188 def test_DELETE_FILEURL_missing2(self):
1189 d = self.DELETE(self.public_url + "/missing/missing")
1190 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
def failUnlessHasBarDotTxtMetadata(self, res):
    # Assert that the t=json response for bar.txt carries tahoe link
    # metadata (linkcrtime/linkmotime) and that linkcrtime matches the
    # value recorded in the fixture's _bar_txt_metadata.
    data = simplejson.loads(res)
    self.failUnless(isinstance(data, list))
    info = data[1]
    self.failUnlessIn("metadata", info)
    self.failUnlessIn("tahoe", info["metadata"])
    tahoe_md = info["metadata"]["tahoe"]
    for key in ("linkcrtime", "linkmotime"):
        self.failUnlessIn(key, tahoe_md)
    self.failUnlessReallyEqual(tahoe_md["linkcrtime"],
                               self._bar_txt_metadata["tahoe"]["linkcrtime"])
1203 def test_GET_FILEURL_json(self):
1204 # twisted.web.http.parse_qs ignores any query args without an '=', so
1205 # I can't do "GET /path?json", I have to do "GET /path/t=json"
1206 # instead. This may make it tricky to emulate the S3 interface
1208 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
1210 self.failUnlessIsBarJSON(data)
1211 self.failUnlessHasBarDotTxtMetadata(data)
1213 d.addCallback(_check1)
1216 def test_GET_FILEURL_json_mutable_type(self):
1217 # The JSON should include format, which says whether the
1218 # file is SDMF or MDMF
1219 d = self.PUT("/uri?format=mdmf",
1220 self.NEWFILE_CONTENTS * 300000)
1221 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1222 def _got_json(json, version):
1223 data = simplejson.loads(json)
1224 assert "filenode" == data[0]
1226 assert isinstance(data, dict)
1228 self.failUnlessIn("format", data)
1229 self.failUnlessEqual(data["format"], version)
1231 d.addCallback(_got_json, "MDMF")
1232 # Now make an SDMF file and check that it is reported correctly.
1233 d.addCallback(lambda ignored:
1234 self.PUT("/uri?format=sdmf",
1235 self.NEWFILE_CONTENTS * 300000))
1236 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
1237 d.addCallback(_got_json, "SDMF")
1240 def test_GET_FILEURL_json_mdmf(self):
1241 d = self.GET("/uri/%s?t=json" % urllib.quote(self._quux_txt_uri))
1242 d.addCallback(self.failUnlessIsQuuxJSON)
1245 def test_GET_FILEURL_json_missing(self):
1246 d = self.GET(self.public_url + "/foo/missing?json")
1247 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
1250 def test_GET_FILEURL_uri(self):
1251 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
1253 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1254 d.addCallback(_check)
1255 d.addCallback(lambda res:
1256 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
1258 # for now, for files, uris and readonly-uris are the same
1259 self.failUnlessReallyEqual(res, self._bar_txt_uri)
1260 d.addCallback(_check2)
1263 def test_GET_FILEURL_badtype(self):
1264 d = self.shouldHTTPError("GET t=bogus", 400, "Bad Request",
1267 self.public_url + "/foo/bar.txt?t=bogus")
1270 def test_CSS_FILE(self):
1271 d = self.GET("/tahoe.css", followRedirect=True)
1273 CSS_STYLE=re.compile('toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline',re.DOTALL)
1274 self.failUnless(CSS_STYLE.search(res), res)
1275 d.addCallback(_check)
1278 def test_GET_FILEURL_uri_missing(self):
1279 d = self.GET(self.public_url + "/foo/missing?t=uri")
1280 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
def _check_upload_and_mkdir_forms(self, html):
    # The page must contain an upload form whose format radio buttons
    # default to CHK/LIT and also offer SDMF and MDMF, plus a mkdir
    # form whose radio buttons default to SDMF and also offer MDMF.
    expected_fragments = [
        'name="t" value="upload"',
        'input checked="checked" type="radio" id="upload-chk" value="chk" name="format"',
        'input type="radio" id="upload-sdmf" value="sdmf" name="format"',
        'input type="radio" id="upload-mdmf" value="mdmf" name="format"',
        'name="t" value="mkdir"',
        'input checked="checked" type="radio" id="mkdir-sdmf" value="sdmf" name="format"',
        'input type="radio" id="mkdir-mdmf" value="mdmf" name="format"',
    ]
    for fragment in expected_fragments:
        self.failUnlessIn(fragment, html)
1298 def test_GET_DIRECTORY_html(self):
1299 d = self.GET(self.public_url + "/foo", followRedirect=True)
1301 self.failUnlessIn('<div class="toolbar-item"><a href="../../..">Return to Welcome page</a></div>', html)
1302 self._check_upload_and_mkdir_forms(html)
1303 self.failUnlessIn("quux", html)
1304 d.addCallback(_check)
1307 def test_GET_root_html(self):
1309 d.addCallback(self._check_upload_and_mkdir_forms)
1312 def test_GET_DIRURL(self):
1313 # the addSlash means we get a redirect here
1314 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
1316 d = self.GET(self.public_url + "/foo", followRedirect=True)
1318 self.failUnlessIn('<a href="%s">Return to Welcome page' % ROOT, res)
1320 # the FILE reference points to a URI, but it should end in bar.txt
1321 bar_url = ("%s/file/%s/@@named=/bar.txt" %
1322 (ROOT, urllib.quote(self._bar_txt_uri)))
1323 get_bar = "".join([r'<td>FILE</td>',
1325 r'<a href="%s">bar.txt</a>' % bar_url,
1327 r'\s+<td align="right">%d</td>' % len(self.BAR_CONTENTS),
1329 self.failUnless(re.search(get_bar, res), res)
1330 for label in ['unlink', 'rename']:
1331 for line in res.split("\n"):
1332 # find the line that contains the relevant button for bar.txt
1333 if ("form action" in line and
1334 ('value="%s"' % (label,)) in line and
1335 'value="bar.txt"' in line):
1336 # the form target should use a relative URL
1337 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
1338 self.failUnlessIn('action="%s"' % foo_url, line)
1339 # and the when_done= should too
1340 #done_url = urllib.quote(???)
1341 #self.failUnlessIn('name="when_done" value="%s"' % done_url, line)
1343 # 'unlink' needs to use POST because it directly has a side effect
1344 if label == 'unlink':
1345 self.failUnlessIn('method="post"', line)
1348 self.fail("unable to find '%s bar.txt' line" % (label,), res)
1350 # the DIR reference just points to a URI
1351 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
1352 get_sub = ((r'<td>DIR</td>')
1353 +r'\s+<td><a href="%s">sub</a></td>' % sub_url)
1354 self.failUnless(re.search(get_sub, res), res)
1355 d.addCallback(_check)
1357 # look at a readonly directory
1358 d.addCallback(lambda res:
1359 self.GET(self.public_url + "/reedownlee", followRedirect=True))
1361 self.failUnlessIn("(read-only)", res)
1362 self.failIfIn("Upload a file", res)
1363 d.addCallback(_check2)
1365 # and at a directory that contains a readonly directory
1366 d.addCallback(lambda res:
1367 self.GET(self.public_url, followRedirect=True))
1369 self.failUnless(re.search('<td>DIR-RO</td>'
1370 r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
1371 d.addCallback(_check3)
1373 # and an empty directory
1374 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
1376 self.failUnlessIn("directory is empty", res)
1377 MKDIR_BUTTON_RE=re.compile('<input type="hidden" name="t" value="mkdir" />.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input type="submit" value="Create" />', re.I)
1378 self.failUnless(MKDIR_BUTTON_RE.search(res), res)
1379 d.addCallback(_check4)
1381 # and at a literal directory
1382 tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
1383 d.addCallback(lambda res:
1384 self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
1386 self.failUnlessIn('(immutable)', res)
1387 self.failUnless(re.search('<td>FILE</td>'
1388 r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short">short</a></td>', res), res)
1389 d.addCallback(_check5)
1392 def test_GET_DIRURL_badtype(self):
1393 d = self.shouldHTTPError("test_GET_DIRURL_badtype",
1397 self.public_url + "/foo?t=bogus")
1400 def test_GET_DIRURL_json(self):
1401 d = self.GET(self.public_url + "/foo?t=json")
1402 d.addCallback(self.failUnlessIsFooJSON)
1405 def test_GET_DIRURL_json_format(self):
1406 d = self.PUT(self.public_url + \
1407 "/foo/sdmf.txt?format=sdmf",
1408 self.NEWFILE_CONTENTS * 300000)
1409 d.addCallback(lambda ignored:
1410 self.PUT(self.public_url + \
1411 "/foo/mdmf.txt?format=mdmf",
1412 self.NEWFILE_CONTENTS * 300000))
1413 # Now we have an MDMF and SDMF file in the directory. If we GET
1414 # its JSON, we should see their encodings.
1415 d.addCallback(lambda ignored:
1416 self.GET(self.public_url + "/foo?t=json"))
1417 def _got_json(json):
1418 data = simplejson.loads(json)
1419 assert data[0] == "dirnode"
1422 kids = data['children']
1424 mdmf_data = kids['mdmf.txt'][1]
1425 self.failUnlessIn("format", mdmf_data)
1426 self.failUnlessEqual(mdmf_data["format"], "MDMF")
1428 sdmf_data = kids['sdmf.txt'][1]
1429 self.failUnlessIn("format", sdmf_data)
1430 self.failUnlessEqual(sdmf_data["format"], "SDMF")
1431 d.addCallback(_got_json)
1435 def test_POST_DIRURL_manifest_no_ophandle(self):
1436 d = self.shouldFail2(error.Error,
1437 "test_POST_DIRURL_manifest_no_ophandle",
1439 "slow operation requires ophandle=",
1440 self.POST, self.public_url, t="start-manifest")
1443 def test_POST_DIRURL_manifest(self):
1444 d = defer.succeed(None)
1445 def getman(ignored, output):
1446 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
1447 followRedirect=True)
1448 d.addCallback(self.wait_for_operation, "125")
1449 d.addCallback(self.get_operation_results, "125", output)
1451 d.addCallback(getman, None)
1452 def _got_html(manifest):
1453 self.failUnlessIn("Manifest of SI=", manifest)
1454 self.failUnlessIn("<td>sub</td>", manifest)
1455 self.failUnlessIn(self._sub_uri, manifest)
1456 self.failUnlessIn("<td>sub/baz.txt</td>", manifest)
1457 d.addCallback(_got_html)
1459 # both t=status and unadorned GET should be identical
1460 d.addCallback(lambda res: self.GET("/operations/125"))
1461 d.addCallback(_got_html)
1463 d.addCallback(getman, "html")
1464 d.addCallback(_got_html)
1465 d.addCallback(getman, "text")
1466 def _got_text(manifest):
1467 self.failUnlessIn("\nsub " + self._sub_uri + "\n", manifest)
1468 self.failUnlessIn("\nsub/baz.txt URI:CHK:", manifest)
1469 d.addCallback(_got_text)
1470 d.addCallback(getman, "JSON")
1472 data = res["manifest"]
1474 for (path_list, cap) in data:
1475 got[tuple(path_list)] = cap
1476 self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
1477 self.failUnlessIn((u"sub", u"baz.txt"), got)
1478 self.failUnlessIn("finished", res)
1479 self.failUnlessIn("origin", res)
1480 self.failUnlessIn("storage-index", res)
1481 self.failUnlessIn("verifycaps", res)
1482 self.failUnlessIn("stats", res)
1483 d.addCallback(_got_json)
1486 def test_POST_DIRURL_deepsize_no_ophandle(self):
1487 d = self.shouldFail2(error.Error,
1488 "test_POST_DIRURL_deepsize_no_ophandle",
1490 "slow operation requires ophandle=",
1491 self.POST, self.public_url, t="start-deep-size")
1494 def test_POST_DIRURL_deepsize(self):
1495 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1496 followRedirect=True)
1497 d.addCallback(self.wait_for_operation, "126")
1498 d.addCallback(self.get_operation_results, "126", "json")
1499 def _got_json(data):
1500 self.failUnlessReallyEqual(data["finished"], True)
1502 self.failUnless(size > 1000)
1503 d.addCallback(_got_json)
1504 d.addCallback(self.get_operation_results, "126", "text")
1506 mo = re.search(r'^size: (\d+)$', res, re.M)
1507 self.failUnless(mo, res)
1508 size = int(mo.group(1))
1509 # with directories, the size varies.
1510 self.failUnless(size > 1000)
1511 d.addCallback(_got_text)
1514 def test_POST_DIRURL_deepstats_no_ophandle(self):
1515 d = self.shouldFail2(error.Error,
1516 "test_POST_DIRURL_deepstats_no_ophandle",
1518 "slow operation requires ophandle=",
1519 self.POST, self.public_url, t="start-deep-stats")
1522 def test_POST_DIRURL_deepstats(self):
1523 d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
1524 followRedirect=True)
1525 d.addCallback(self.wait_for_operation, "127")
1526 d.addCallback(self.get_operation_results, "127", "json")
1527 def _got_json(stats):
1528 expected = {"count-immutable-files": 3,
1529 "count-mutable-files": 2,
1530 "count-literal-files": 0,
1532 "count-directories": 3,
1533 "size-immutable-files": 57,
1534 "size-literal-files": 0,
1535 #"size-directories": 1912, # varies
1536 #"largest-directory": 1590,
1537 "largest-directory-children": 7,
1538 "largest-immutable-file": 19,
1540 for k,v in expected.iteritems():
1541 self.failUnlessReallyEqual(stats[k], v,
1542 "stats[%s] was %s, not %s" %
1544 self.failUnlessReallyEqual(stats["size-files-histogram"],
1546 d.addCallback(_got_json)
1549 def test_POST_DIRURL_stream_manifest(self):
1550 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1552 self.failUnless(res.endswith("\n"))
1553 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1554 self.failUnlessReallyEqual(len(units), 9)
1555 self.failUnlessEqual(units[-1]["type"], "stats")
1557 self.failUnlessEqual(first["path"], [])
1558 self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
1559 self.failUnlessEqual(first["type"], "directory")
1560 baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
1561 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1562 self.failIfEqual(baz["storage-index"], None)
1563 self.failIfEqual(baz["verifycap"], None)
1564 self.failIfEqual(baz["repaircap"], None)
1565 # XXX: Add quux and baz to this test.
1567 d.addCallback(_check)
1570 def test_GET_DIRURL_uri(self):
1571 d = self.GET(self.public_url + "/foo?t=uri")
1573 self.failUnlessReallyEqual(to_str(res), self._foo_uri)
1574 d.addCallback(_check)
1577 def test_GET_DIRURL_readonly_uri(self):
1578 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1580 self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
1581 d.addCallback(_check)
1584 def test_PUT_NEWDIRURL(self):
1585 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1586 d.addCallback(lambda res:
1587 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1588 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1589 d.addCallback(self.failUnlessNodeKeysAre, [])
1592 def test_PUT_NEWDIRURL_mdmf(self):
1593 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir&format=mdmf", "")
1594 d.addCallback(lambda res:
1595 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1596 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1597 d.addCallback(lambda node:
1598 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
1601 def test_PUT_NEWDIRURL_sdmf(self):
1602 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir&format=sdmf",
1604 d.addCallback(lambda res:
1605 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1606 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1607 d.addCallback(lambda node:
1608 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
def test_PUT_NEWDIRURL_bad_format(self):
    # PUT t=mkdir with an unknown format= value must be rejected with
    # 400 Bad Request. (Fixed a stray '=' that had crept into the path
    # as "/foo/newdir=?t=mkdir..."; the POST variant of this test uses
    # the intended "/foo/newdir?t=mkdir..." form.)
    return self.shouldHTTPError("PUT_NEWDIRURL_bad_format",
                                400, "Bad Request", "Unknown format: foo",
                                self.PUT, self.public_url +
                                "/foo/newdir?t=mkdir&format=foo", "")
1617 def test_POST_NEWDIRURL(self):
1618 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
1619 d.addCallback(lambda res:
1620 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1621 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1622 d.addCallback(self.failUnlessNodeKeysAre, [])
1625 def test_POST_NEWDIRURL_mdmf(self):
1626 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir&format=mdmf", "")
1627 d.addCallback(lambda res:
1628 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1629 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1630 d.addCallback(lambda node:
1631 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
1634 def test_POST_NEWDIRURL_sdmf(self):
1635 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir&format=sdmf", "")
1636 d.addCallback(lambda res:
1637 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1638 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1639 d.addCallback(lambda node:
1640 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
def test_POST_NEWDIRURL_bad_format(self):
    # POST t=mkdir with an unknown format= value must be rejected
    # with 400 Bad Request.
    url = self.public_url + "/foo/newdir?t=mkdir&format=foo"
    return self.shouldHTTPError("POST_NEWDIRURL_bad_format",
                                400, "Bad Request", "Unknown format: foo",
                                self.POST2, url, "")
1649 def test_POST_NEWDIRURL_emptyname(self):
1650 # an empty pathname component (i.e. a double-slash) is disallowed
1651 d = self.shouldFail2(error.Error, "POST_NEWDIRURL_emptyname",
1653 "The webapi does not allow empty pathname components, i.e. a double slash",
1654 self.POST, self.public_url + "//?t=mkdir")
1657 def _do_POST_NEWDIRURL_initial_children_test(self, version=None):
1658 (newkids, caps) = self._create_initial_children()
1659 query = "/foo/newdir?t=mkdir-with-children"
1660 if version == MDMF_VERSION:
1661 query += "&format=mdmf"
1662 elif version == SDMF_VERSION:
1663 query += "&format=sdmf"
1665 version = SDMF_VERSION # for later
1666 d = self.POST2(self.public_url + query,
1667 simplejson.dumps(newkids))
1669 n = self.s.create_node_from_uri(uri.strip())
1670 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1671 self.failUnlessEqual(n._node.get_version(), version)
1672 d2.addCallback(lambda ign:
1673 self.failUnlessROChildURIIs(n, u"child-imm",
1675 d2.addCallback(lambda ign:
1676 self.failUnlessRWChildURIIs(n, u"child-mutable",
1678 d2.addCallback(lambda ign:
1679 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
1681 d2.addCallback(lambda ign:
1682 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
1683 caps['unknown_rocap']))
1684 d2.addCallback(lambda ign:
1685 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
1686 caps['unknown_rwcap']))
1687 d2.addCallback(lambda ign:
1688 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1689 caps['unknown_immcap']))
1690 d2.addCallback(lambda ign:
1691 self.failUnlessRWChildURIIs(n, u"dirchild",
1693 d2.addCallback(lambda ign:
1694 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1696 d2.addCallback(lambda ign:
1697 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1698 caps['emptydircap']))
1700 d.addCallback(_check)
1701 d.addCallback(lambda res:
1702 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1703 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1704 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1705 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1706 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
def test_POST_NEWDIRURL_initial_children(self):
    # No version argument: the helper defaults to SDMF (see
    # _do_POST_NEWDIRURL_initial_children_test).
    return self._do_POST_NEWDIRURL_initial_children_test()
def test_POST_NEWDIRURL_initial_children_mdmf(self):
    # Same mkdir-with-children checks, but requesting an MDMF directory.
    return self._do_POST_NEWDIRURL_initial_children_test(MDMF_VERSION)
def test_POST_NEWDIRURL_initial_children_sdmf(self):
    # Same mkdir-with-children checks, explicitly requesting SDMF.
    return self._do_POST_NEWDIRURL_initial_children_test(SDMF_VERSION)
def test_POST_NEWDIRURL_initial_children_bad_format(self):
    # t=mkdir-with-children with an unknown format= value must fail
    # with 400 Bad Request even when the children JSON is well-formed.
    (newkids, caps) = self._create_initial_children()
    body = simplejson.dumps(newkids)
    url = self.public_url + "/foo/newdir?t=mkdir-with-children&format=foo"
    return self.shouldHTTPError("POST_NEWDIRURL_initial_children_bad_format",
                                400, "Bad Request", "Unknown format: foo",
                                self.POST2, url, body)
1726 def test_POST_NEWDIRURL_immutable(self):
1727 (newkids, caps) = self._create_immutable_children()
1728 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
1729 simplejson.dumps(newkids))
1731 n = self.s.create_node_from_uri(uri.strip())
1732 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
1733 d2.addCallback(lambda ign:
1734 self.failUnlessROChildURIIs(n, u"child-imm",
1736 d2.addCallback(lambda ign:
1737 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
1738 caps['unknown_immcap']))
1739 d2.addCallback(lambda ign:
1740 self.failUnlessROChildURIIs(n, u"dirchild-imm",
1742 d2.addCallback(lambda ign:
1743 self.failUnlessROChildURIIs(n, u"dirchild-lit",
1745 d2.addCallback(lambda ign:
1746 self.failUnlessROChildURIIs(n, u"dirchild-empty",
1747 caps['emptydircap']))
1749 d.addCallback(_check)
1750 d.addCallback(lambda res:
1751 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1752 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1753 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
1754 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1755 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
1756 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1757 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
1758 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1759 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
1760 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1761 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
1762 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1763 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
1764 d.addErrback(self.explain_web_error)
1767 def test_POST_NEWDIRURL_immutable_bad(self):
1768 (newkids, caps) = self._create_initial_children()
1769 d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
1771 "needed to be immutable but was not",
1773 self.public_url + "/foo/newdir?t=mkdir-immutable",
1774 simplejson.dumps(newkids))
1777 def test_PUT_NEWDIRURL_exists(self):
1778 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1779 d.addCallback(lambda res:
1780 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1781 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1782 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1785 def test_PUT_NEWDIRURL_blocked(self):
1786 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1787 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1789 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1790 d.addCallback(lambda res:
1791 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1792 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1793 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# t=mkdir-p with a path= query arg should create intermediate directories
# (mkp/sub1/sub2) and return the same URI when repeated (idempotent).
# NOTE(review): excerpt elides original lines 1803/1807/1809/1811/1813;
# code kept verbatim.
1796 def test_PUT_NEWDIRURL_mkdir_p(self):
1797 d = defer.succeed(None)
1798 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
1799 d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
1800 d.addCallback(lambda res: self._foo_node.get(u"mkp"))
1801 def mkdir_p(mkpnode):
1802 url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
1804 def made_subsub(ssuri):
1805 d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
1806 d.addCallback(lambda ssnode: self.failUnlessReallyEqual(ssnode.get_uri(), ssuri))
1808 d.addCallback(lambda uri2: self.failUnlessReallyEqual(uri2, ssuri))
1810 d.addCallback(made_subsub)
1812 d.addCallback(mkdir_p)
# PUT t=mkdir on a nested path should create the intermediate 'subdir' as
# well as 'newdir' (which ends up empty), without adding 'newdir' to foo.
1815 def test_PUT_NEWDIRURL_mkdirs(self):
1816 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1817 d.addCallback(lambda res:
1818 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1819 d.addCallback(lambda res:
1820 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1821 d.addCallback(lambda res:
1822 self._foo_node.get_child_at_path(u"subdir/newdir"))
1823 d.addCallback(self.failUnlessNodeKeysAre, [])
# Like test_PUT_NEWDIRURL_mkdirs, but with format=mdmf: the leaf 'newdir'
# must be MDMF. (Whether intermediate dirs inherit the format is left open.)
1826 def test_PUT_NEWDIRURL_mkdirs_mdmf(self):
1827 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir&format=mdmf", "")
1828 d.addCallback(lambda ignored:
1829 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1830 d.addCallback(lambda ignored:
1831 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1832 d.addCallback(lambda ignored:
1833 self._foo_node.get_child_at_path(u"subdir"))
1834 def _got_subdir(subdir):
1835 # XXX: is checking the intermediate dir's format what we want here?
1836 #self.failUnlessEqual(subdir._node.get_version(), MDMF_VERSION)
1837 self.failUnlessNodeHasChild(subdir, u"newdir")
1838 return subdir.get_child_at_path(u"newdir")
1839 d.addCallback(_got_subdir)
1840 d.addCallback(lambda newdir:
1841 self.failUnlessEqual(newdir._node.get_version(), MDMF_VERSION))
# Like test_PUT_NEWDIRURL_mkdirs, but with format=sdmf: the leaf 'newdir'
# must be SDMF.
1844 def test_PUT_NEWDIRURL_mkdirs_sdmf(self):
1845 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir&format=sdmf", "")
1846 d.addCallback(lambda ignored:
1847 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1848 d.addCallback(lambda ignored:
1849 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1850 d.addCallback(lambda ignored:
1851 self._foo_node.get_child_at_path(u"subdir"))
1852 def _got_subdir(subdir):
1853 # XXX: is checking the intermediate dir's format what we want here?
1854 #self.failUnlessEqual(subdir._node.get_version(), MDMF_VERSION)
1855 self.failUnlessNodeHasChild(subdir, u"newdir")
1856 return subdir.get_child_at_path(u"newdir")
1857 d.addCallback(_got_subdir)
1858 d.addCallback(lambda newdir:
1859 self.failUnlessEqual(newdir._node.get_version(), SDMF_VERSION))
# An unrecognized format= value on t=mkdir must produce 400 Bad Request.
# NOTE(review): excerpt elides original line 1867 (final PUT body argument).
1862 def test_PUT_NEWDIRURL_mkdirs_bad_format(self):
1863 return self.shouldHTTPError("PUT_NEWDIRURL_mkdirs_bad_format",
1864 400, "Bad Request", "Unknown format: foo",
1865 self.PUT, self.public_url + \
1866 "/foo/subdir/newdir?t=mkdir&format=foo",
# DELETE on a directory URL unlinks it from its parent.
1869 def test_DELETE_DIRURL(self):
1870 d = self.DELETE(self.public_url + "/foo")
1871 d.addCallback(lambda res:
1872 self.failIfNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent child yields 404 and leaves the parent unchanged.
1875 def test_DELETE_DIRURL_missing(self):
1876 d = self.DELETE(self.public_url + "/foo/missing")
1877 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1878 d.addCallback(lambda res:
1879 self.failUnlessNodeHasChild(self.public_root, u"foo"))
# DELETE of a nonexistent top-level name also yields 404.
1882 def test_DELETE_DIRURL_missing2(self):
1883 d = self.DELETE(self.public_url + "/missing")
1884 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
# Debug helper: walk the public root with DirnodeWalkerMixin and print (or
# otherwise visit) each child. Not called by the tests directly.
# NOTE(review): excerpt elides original lines 1888/1891/1893 (the visitor
# body and the return); code kept verbatim.
1887 def dump_root(self):
1889 w = webish.DirnodeWalkerMixin()
1890 def visitor(childpath, childnode, metadata):
1892 d = w.walk(self.public_root, visitor)
# Assert that `node` has exactly the given child names (unicode required).
# NOTE(review): excerpt elides original line 1898 (which creates `d`,
# presumably from node.list() — TODO confirm); code kept verbatim.
1895 def failUnlessNodeKeysAre(self, node, expected_keys):
1896 for k in expected_keys:
1897 assert isinstance(k, unicode)
1899 def _check(children):
1900 self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
1901 d.addCallback(_check)
# Assert that `node` has a child named `name` (unicode required).
# NOTE(review): excerpt elides original line 1905 (creation of `d`).
1903 def failUnlessNodeHasChild(self, node, name):
1904 assert isinstance(name, unicode)
1906 def _check(children):
1907 self.failUnlessIn(name, children)
1908 d.addCallback(_check)
# Assert that `node` does NOT have a child named `name` (unicode required).
# NOTE(review): excerpt elides original line 1912 (creation of `d`).
1910 def failIfNodeHasChild(self, node, name):
1911 assert isinstance(name, unicode)
1913 def _check(children):
1914 self.failIfIn(name, children)
1915 d.addCallback(_check)
# Assert that the (immutable) child at `name` under `node` downloads to
# exactly `expected_contents`.
1918 def failUnlessChildContentsAre(self, node, name, expected_contents):
1919 assert isinstance(name, unicode)
1920 d = node.get_child_at_path(name)
1921 d.addCallback(lambda node: download_to_data(node))
1922 def _check(contents):
1923 self.failUnlessReallyEqual(contents, expected_contents)
1924 d.addCallback(_check)
# Assert that the mutable child at `name` under `node` has best-version
# contents equal to `expected_contents`.
1927 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1928 assert isinstance(name, unicode)
1929 d = node.get_child_at_path(name)
1930 d.addCallback(lambda node: node.download_best_version())
1931 def _check(contents):
1932 self.failUnlessReallyEqual(contents, expected_contents)
1933 d.addCallback(_check)
# Assert that child `name` of `node` is writable and carries exactly
# `expected_uri` (write URI), with the matching read-only URI derived via
# self._make_readonly.
# NOTE(review): excerpt elides original lines 1939/1944 (the `_check`
# def line and an intermediate line); code kept verbatim.
1936 def failUnlessRWChildURIIs(self, node, name, expected_uri):
1937 assert isinstance(name, unicode)
1938 d = node.get_child_at_path(name)
1940 self.failUnless(child.is_unknown() or not child.is_readonly())
1941 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1942 self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
1943 expected_ro_uri = self._make_readonly(expected_uri)
1945 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1946 d.addCallback(_check)
# Assert that child `name` of `node` is read-only: no write URI, and both
# get_uri and get_readonly_uri equal `expected_uri`.
# NOTE(review): excerpt elides original line 1952 (the `_check` def line).
1949 def failUnlessROChildURIIs(self, node, name, expected_uri):
1950 assert isinstance(name, unicode)
1951 d = node.get_child_at_path(name)
1953 self.failUnless(child.is_unknown() or child.is_readonly())
1954 self.failUnlessReallyEqual(child.get_write_uri(), None)
1955 self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
1956 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
1957 d.addCallback(_check)
# Assert that `got_uri` (e.g. a webapi response body) matches the write URI
# of child `name`, and that the derived read-only URI matches too.
# NOTE(review): excerpt elides original lines 1963/1968 (the `_check` def
# line and an intermediate line); code kept verbatim.
1960 def failUnlessURIMatchesRWChild(self, got_uri, node, name):
1961 assert isinstance(name, unicode)
1962 d = node.get_child_at_path(name)
1964 self.failUnless(child.is_unknown() or not child.is_readonly())
1965 self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
1966 self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
1967 expected_ro_uri = self._make_readonly(got_uri)
1969 self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
1970 d.addCallback(_check)
# Assert that `got_uri` matches the read-only URI of child `name` and that
# the child has no write URI.
# NOTE(review): excerpt elides original line 1976 (the `_check` def line).
1973 def failUnlessURIMatchesROChild(self, got_uri, node, name):
1974 assert isinstance(name, unicode)
1975 d = node.get_child_at_path(name)
1977 self.failUnless(child.is_unknown() or child.is_readonly())
1978 self.failUnlessReallyEqual(child.get_write_uri(), None)
1979 self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
1980 self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
1981 d.addCallback(_check)
def failUnlessCHKURIHasContents(self, got_uri, contents):
    """Assert that the fake grid stores exactly *contents* under *got_uri*.

    Uses failUnlessReallyEqual, like the sibling helpers above, instead of
    the weaker failUnless(a == b), so a mismatch reports both values in the
    failure message. Raises KeyError if got_uri was never uploaded into
    FakeCHKFileNode.all_contents.
    """
    self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[got_uri], contents)
# POST t=upload with a file field adds an immutable child 'new.txt' to foo.
# NOTE(review): excerpt elides original line 1990 (the binding of `fn`,
# presumably fn = self._foo_node — TODO confirm); code kept verbatim.
1987 def test_POST_upload(self):
1988 d = self.POST(self.public_url + "/foo", t="upload",
1989 file=("new.txt", self.NEWFILE_CONTENTS))
1991 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
1992 d.addCallback(lambda res:
1993 self.failUnlessChildContentsAre(fn, u"new.txt",
1994 self.NEWFILE_CONTENTS))
# Upload with a non-ASCII filename; the child must be retrievable via the
# UTF-8-encoded URL.
# NOTE(review): excerpt elides original lines 2001/2010-2012 (binding of
# `fn` and trailing arguments); code kept verbatim.
1997 def test_POST_upload_unicode(self):
1998 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
1999 d = self.POST(self.public_url + "/foo", t="upload",
2000 file=(filename, self.NEWFILE_CONTENTS))
2002 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
2003 d.addCallback(lambda res:
2004 self.failUnlessChildContentsAre(fn, filename,
2005 self.NEWFILE_CONTENTS))
2006 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
2007 d.addCallback(lambda res: self.GET(target_url))
2008 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
2009 self.NEWFILE_CONTENTS,
# Like test_POST_upload_unicode, but the name= field overrides the filename
# carried inside the file part ("overridden").
# NOTE(review): excerpt elides original lines 2016/2018/2027-2029; code
# kept verbatim.
2013 def test_POST_upload_unicode_named(self):
2014 filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
2015 d = self.POST(self.public_url + "/foo", t="upload",
2017 file=("overridden", self.NEWFILE_CONTENTS))
2019 d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
2020 d.addCallback(lambda res:
2021 self.failUnlessChildContentsAre(fn, filename,
2022 self.NEWFILE_CONTENTS))
2023 target_url = self.public_url + "/foo/" + filename.encode("utf-8")
2024 d.addCallback(lambda res: self.GET(target_url))
2025 d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
2026 self.NEWFILE_CONTENTS,
# Unlinked upload (POST /uri t=upload): the response is an HTML "Upload
# Results" page; scrape the URI out of it and verify the stored contents.
# NOTE(review): excerpt elides original line 2042 (return of new_uri from
# _check_upload_results); code kept verbatim.
2030 def test_POST_upload_no_link(self):
2031 d = self.POST("/uri", t="upload",
2032 file=("new.txt", self.NEWFILE_CONTENTS))
2033 def _check_upload_results(page):
2034 # this should be a page which describes the results of the upload
2035 # that just finished.
2036 self.failUnlessIn("Upload Results:", page)
2037 self.failUnlessIn("URI:", page)
2038 uri_re = re.compile("URI: <tt><span>(.*)</span>")
2039 mo = uri_re.search(page)
2040 self.failUnless(mo, page)
2041 new_uri = mo.group(1)
2043 d.addCallback(_check_upload_results)
2044 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
# when_done=/ on an unlinked upload should redirect the client to "/".
2047 def test_POST_upload_no_link_whendone(self):
2048 d = self.POST("/uri", t="upload", when_done="/",
2049 file=("new.txt", self.NEWFILE_CONTENTS))
2050 d.addBoth(self.shouldRedirect, "/")
# Helper: invoke `callable` and require that it fails with PageRedirect;
# pass the redirect's status code and target to `checker`. If the call
# returns normally instead, the test fails.
# NOTE(review): excerpt elides original lines 2055/2062-2065 (the inner
# callback's def line and the tail of the method); code kept verbatim.
2053 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
2054 d = defer.maybeDeferred(callable, *args, **kwargs)
2056 if isinstance(res, failure.Failure):
2057 res.trap(error.PageRedirect)
2058 statuscode = res.value.status
2059 target = res.value.location
2060 return checker(statuscode, target)
2061 self.fail("%s: callable was supposed to redirect, not return '%s'"
# when_done=/uri/%(uri)s: the redirect target has the new filecap
# interpolated; following it must serve the uploaded bytes.
# NOTE(review): excerpt elides original line 2072 (the `check` argument to
# shouldRedirect2); code kept verbatim.
2066 def test_POST_upload_no_link_whendone_results(self):
2067 def check(statuscode, target):
2068 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2069 self.failUnless(target.startswith(self.webish_url), target)
2070 return client.getPage(target, method="GET")
2071 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
2073 self.POST, "/uri", t="upload",
2074 when_done="/uri/%(uri)s",
2075 file=("new.txt", self.NEWFILE_CONTENTS))
2076 d.addCallback(lambda res:
2077 self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS))
# Unlinked mutable upload: response body is a bare URI:SSK filecap. Verify
# the contents via the node API, then via GET /uri/<cap> and GET /file/<cap>.
# NOTE(review): excerpt elides original lines 2092/2096/2100 (the def lines
# of _check2/_check3/_check4); code kept verbatim.
2080 def test_POST_upload_no_link_mutable(self):
2081 d = self.POST("/uri", t="upload", mutable="true",
2082 file=("new.txt", self.NEWFILE_CONTENTS))
2083 def _check(filecap):
2084 filecap = filecap.strip()
2085 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
2086 self.filecap = filecap
2087 u = uri.WriteableSSKFileURI.init_from_string(filecap)
2088 self.failUnlessIn(u.get_storage_index(), FakeMutableFileNode.all_contents)
2089 n = self.s.create_node_from_uri(filecap)
2090 return n.download_best_version()
2091 d.addCallback(_check)
2093 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
2094 return self.GET("/uri/%s" % urllib.quote(self.filecap))
2095 d.addCallback(_check2)
2097 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
2098 return self.GET("/file/%s" % urllib.quote(self.filecap))
2099 d.addCallback(_check3)
2101 self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
2102 d.addCallback(_check4)
# The old SDMF size limit is gone: uploading a mutable file one byte over
# the former MUTABLE_SIZELIMIT must now succeed.
2105 def test_POST_upload_no_link_mutable_toobig(self):
2106 # The SDMF size limit is no longer in place, so we should be
2107 # able to upload mutable files that are as large as we want them
2109 d = self.POST("/uri", t="upload", mutable="true",
2110 file=("new.txt", "b" * (self.s.MUTABLE_SIZELIMIT + 1)))
# Unlinked upload with explicit format=: SDMF/MDMF return a bare filecap,
# CHK returns an upload-results page to scrape; then t=json must report the
# matching "format" value. Exercised for chk/CHK/sdmf/mdmf.
# NOTE(review): excerpt elides original lines 2122-2123/2135/2139 and the
# trailing return; code kept verbatim.
2114 def test_POST_upload_format_unlinked(self):
2115 def _check_upload_unlinked(ign, format, uri_prefix):
2116 filename = format + ".txt"
2117 d = self.POST("/uri?t=upload&format=" + format,
2118 file=(filename, self.NEWFILE_CONTENTS * 300000))
2119 def _got_results(results):
2120 if format.upper() in ("SDMF", "MDMF"):
2121 # webapi.rst says this returns a filecap
2124 # for immutable, it returns an "upload results page", and
2125 # the filecap is buried inside
2126 line = [l for l in results.split("\n") if "URI: " in l][0]
2127 mo = re.search(r'<span>([^<]+)</span>', line)
2128 filecap = mo.group(1)
2129 self.failUnless(filecap.startswith(uri_prefix),
2130 (uri_prefix, filecap))
2131 return self.GET("/uri/%s?t=json" % filecap)
2132 d.addCallback(_got_results)
2133 def _got_json(json):
2134 data = simplejson.loads(json)
2136 self.failUnlessIn("format", data)
2137 self.failUnlessEqual(data["format"], format.upper())
2138 d.addCallback(_got_json)
2140 d = defer.succeed(None)
2141 d.addCallback(_check_upload_unlinked, "chk", "URI:CHK")
2142 d.addCallback(_check_upload_unlinked, "CHK", "URI:CHK")
2143 d.addCallback(_check_upload_unlinked, "sdmf", "URI:SSK")
2144 d.addCallback(_check_upload_unlinked, "mdmf", "URI:MDMF")
# format=foo on an unlinked upload must produce 400 "Unknown format: foo".
# NOTE(review): excerpt elides original line 2150 (the `self.POST` callable
# argument); code kept verbatim.
2147 def test_POST_upload_bad_format_unlinked(self):
2148 return self.shouldHTTPError("POST_upload_bad_format_unlinked",
2149 400, "Bad Request", "Unknown format: foo",
2151 "/uri?t=upload&format=foo",
2152 file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
# Linked upload into /foo with explicit format=: the returned filecap must
# match the child and carry the right URI prefix; t=json must echo the
# format. Exercised for chk/sdmf/mdmf/MDMF.
# NOTE(review): excerpt elides original lines 2161/2169/2173-2174 and the
# trailing return; code kept verbatim.
2154 def test_POST_upload_format(self):
2155 def _check_upload(ign, format, uri_prefix, fn=None):
2156 filename = format + ".txt"
2157 d = self.POST(self.public_url +
2158 "/foo?t=upload&format=" + format,
2159 file=(filename, self.NEWFILE_CONTENTS * 300000))
2160 def _got_filecap(filecap):
2162 filenameu = unicode(filename)
2163 self.failUnlessURIMatchesRWChild(filecap, fn, filenameu)
2164 self.failUnless(filecap.startswith(uri_prefix))
2165 return self.GET(self.public_url + "/foo/%s?t=json" % filename)
2166 d.addCallback(_got_filecap)
2167 def _got_json(json):
2168 data = simplejson.loads(json)
2170 self.failUnlessIn("format", data)
2171 self.failUnlessEqual(data["format"], format.upper())
2172 d.addCallback(_got_json)
2175 d = defer.succeed(None)
2176 d.addCallback(_check_upload, "chk", "URI:CHK")
2177 d.addCallback(_check_upload, "sdmf", "URI:SSK", self._foo_node)
2178 d.addCallback(_check_upload, "mdmf", "URI:MDMF")
2179 d.addCallback(_check_upload, "MDMF", "URI:MDMF")
# format=foo on a linked upload must produce 400 "Unknown format: foo".
2182 def test_POST_upload_bad_format(self):
2183 return self.shouldHTTPError("POST_upload_bad_format",
2184 400, "Bad Request", "Unknown format: foo",
2185 self.POST, self.public_url + \
2186 "/foo?t=upload&format=foo",
2187 file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
# End-to-end mutable-file workflow through the webapi:
#   1. POST t=upload mutable=true creates new.txt; remember node + URI.
#   2. Re-upload via POST, then via PUT: the URI must not change.
#   3. Check the directory listing (HTML and t=json), the file's t=json,
#      t=uri and t=readonly-uri views, GET /uri/<cap>, and HEAD headers.
#   4. Finally confirm the old MUTABLE_SIZELIMIT is not enforced.
# NOTE(review): excerpt elides many interior lines (e.g. 2193, 2199, 2216,
# 2218, 2232-2233, 2247, 2262, 2316-2317 and the trailing return) — the
# bindings of `fn`, several inner `def _got*` lines and argument tails are
# not visible here; code kept verbatim.
2189 def test_POST_upload_mutable(self):
2190 # this creates a mutable file
2191 d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
2192 file=("new.txt", self.NEWFILE_CONTENTS))
2194 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
2195 d.addCallback(lambda res:
2196 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
2197 self.NEWFILE_CONTENTS))
2198 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
2200 self.failUnless(IMutableFileNode.providedBy(newnode))
2201 self.failUnless(newnode.is_mutable())
2202 self.failIf(newnode.is_readonly())
2203 self._mutable_node = newnode
2204 self._mutable_uri = newnode.get_uri()
2207 # now upload it again and make sure that the URI doesn't change
2208 NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
2209 d.addCallback(lambda res:
2210 self.POST(self.public_url + "/foo", t="upload",
2212 file=("new.txt", NEWER_CONTENTS)))
2213 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
2214 d.addCallback(lambda res:
2215 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
2217 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
2219 self.failUnless(IMutableFileNode.providedBy(newnode))
2220 self.failUnless(newnode.is_mutable())
2221 self.failIf(newnode.is_readonly())
2222 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
2223 d.addCallback(_got2)
2225 # upload a second time, using PUT instead of POST
2226 NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
2227 d.addCallback(lambda res:
2228 self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
2229 d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
2230 d.addCallback(lambda res:
2231 self.failUnlessMutableChildContentsAre(fn, u"new.txt",
2234 # finally list the directory, since mutable files are displayed
2235 # slightly differently
2237 d.addCallback(lambda res:
2238 self.GET(self.public_url + "/foo/",
2239 followRedirect=True))
2240 def _check_page(res):
2241 # TODO: assert more about the contents
2242 self.failUnlessIn("SSK", res)
2244 d.addCallback(_check_page)
2246 d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
2248 self.failUnless(IMutableFileNode.providedBy(newnode))
2249 self.failUnless(newnode.is_mutable())
2250 self.failIf(newnode.is_readonly())
2251 self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
2252 d.addCallback(_got3)
2254 # look at the JSON form of the enclosing directory
2255 d.addCallback(lambda res:
2256 self.GET(self.public_url + "/foo/?t=json",
2257 followRedirect=True))
2258 def _check_page_json(res):
2259 parsed = simplejson.loads(res)
2260 self.failUnlessEqual(parsed[0], "dirnode")
2261 children = dict( [(unicode(name),value)
2263 in parsed[1]["children"].iteritems()] )
2264 self.failUnlessIn(u"new.txt", children)
2265 new_json = children[u"new.txt"]
2266 self.failUnlessEqual(new_json[0], "filenode")
2267 self.failUnless(new_json[1]["mutable"])
2268 self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
2269 ro_uri = self._mutable_node.get_readonly().to_string()
2270 self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
2271 d.addCallback(_check_page_json)
2273 # and the JSON form of the file
2274 d.addCallback(lambda res:
2275 self.GET(self.public_url + "/foo/new.txt?t=json"))
2276 def _check_file_json(res):
2277 parsed = simplejson.loads(res)
2278 self.failUnlessEqual(parsed[0], "filenode")
2279 self.failUnless(parsed[1]["mutable"])
2280 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
2281 ro_uri = self._mutable_node.get_readonly().to_string()
2282 self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
2283 d.addCallback(_check_file_json)
2285 # and look at t=uri and t=readonly-uri
2286 d.addCallback(lambda res:
2287 self.GET(self.public_url + "/foo/new.txt?t=uri"))
2288 d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
2289 d.addCallback(lambda res:
2290 self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
2291 def _check_ro_uri(res):
2292 ro_uri = self._mutable_node.get_readonly().to_string()
2293 self.failUnlessReallyEqual(res, ro_uri)
2294 d.addCallback(_check_ro_uri)
2296 # make sure we can get to it from /uri/URI
2297 d.addCallback(lambda res:
2298 self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
2299 d.addCallback(lambda res:
2300 self.failUnlessReallyEqual(res, NEW2_CONTENTS))
2302 # and that HEAD computes the size correctly
2303 d.addCallback(lambda res:
2304 self.HEAD(self.public_url + "/foo/new.txt",
2305 return_response=True))
2306 def _got_headers((res, status, headers)):
2307 self.failUnlessReallyEqual(res, "")
2308 self.failUnlessReallyEqual(headers["content-length"][0],
2309 str(len(NEW2_CONTENTS)))
2310 self.failUnlessReallyEqual(headers["content-type"], ["text/plain"])
2311 d.addCallback(_got_headers)
2313 # make sure that outdated size limits aren't enforced anymore.
2314 d.addCallback(lambda ignored:
2315 self.POST(self.public_url + "/foo", t="upload",
2318 "b" * (self.s.MUTABLE_SIZELIMIT+1))))
2319 d.addErrback(self.dump_error)
# Mutable uploads have no size limit (SDMF's was removed, MDMF never had
# one); an upload one byte over the old limit must succeed.
2322 def test_POST_upload_mutable_toobig(self):
2323 # SDMF had a size limti that was removed a while ago. MDMF has
2324 # never had a size limit. Test to make sure that we do not
2325 # encounter errors when trying to upload large mutable files,
2326 # since there should be no coded prohibitions regarding large
2328 d = self.POST(self.public_url + "/foo",
2329 t="upload", mutable="true",
2330 file=("new.txt", "b" * (self.s.MUTABLE_SIZELIMIT + 1)))
# Errback helper: print the HTTP response body that twisted.web.client
# stashes on error.Error so trial's ERROR output is informative.
# NOTE(review): excerpt elides original lines 2342/2344-2345 (a print line
# and the re-raise/return of the failure); code kept verbatim.
2333 def dump_error(self, f):
2334 # if the web server returns an error code (like 400 Bad Request),
2335 # web.client.getPage puts the HTTP response body into the .response
2336 # attribute of the exception object that it gives back. It does not
2337 # appear in the Failure's repr(), so the ERROR that trial displays
2338 # will be rather terse and unhelpful. addErrback this method to the
2339 # end of your chain to get more information out of these errors.
2340 if f.check(error.Error):
2341 print "web.error.Error:"
2343 print f.value.response
# Uploading over an existing child (bar.txt) replaces its contents.
# NOTE(review): excerpt elides original line 2349 (binding of `fn`).
2346 def test_POST_upload_replace(self):
2347 d = self.POST(self.public_url + "/foo", t="upload",
2348 file=("bar.txt", self.NEWFILE_CONTENTS))
2350 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
2351 d.addCallback(lambda res:
2352 self.failUnlessChildContentsAre(fn, u"bar.txt",
2353 self.NEWFILE_CONTENTS))
# replace=false is fine when the child does not yet exist.
2356 def test_POST_upload_no_replace_ok(self):
2357 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
2358 file=("new.txt", self.NEWFILE_CONTENTS))
2359 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
2360 d.addCallback(lambda res: self.failUnlessReallyEqual(res,
2361 self.NEWFILE_CONTENTS))
# replace=false as a query arg must refuse to overwrite an existing child,
# leaving the original bar.txt intact.
# NOTE(review): excerpt elides original line 2369 (the expected HTTP status
# string argument); code kept verbatim.
2364 def test_POST_upload_no_replace_queryarg(self):
2365 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
2366 file=("bar.txt", self.NEWFILE_CONTENTS))
2367 d.addBoth(self.shouldFail, error.Error,
2368 "POST_upload_no_replace_queryarg",
2370 "There was already a child by that name, and you asked me "
2371 "to not replace it")
2372 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2373 d.addCallback(self.failUnlessIsBarDotTxt)
# Same as the queryarg variant, but replace=false given as a form field.
# NOTE(review): excerpt elides original line 2380 (the expected HTTP status
# string argument); code kept verbatim.
2376 def test_POST_upload_no_replace_field(self):
2377 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
2378 file=("bar.txt", self.NEWFILE_CONTENTS))
2379 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
2381 "There was already a child by that name, and you asked me "
2382 "to not replace it")
2383 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2384 d.addCallback(self.failUnlessIsBarDotTxt)
# when_done=/THERE on a linked upload redirects there; the child must still
# have been created with the uploaded contents.
# NOTE(review): excerpt elides original line 2391 (binding of `fn`).
2387 def test_POST_upload_whendone(self):
2388 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
2389 file=("new.txt", self.NEWFILE_CONTENTS))
2390 d.addBoth(self.shouldRedirect, "/THERE")
2392 d.addCallback(lambda res:
2393 self.failUnlessChildContentsAre(fn, u"new.txt",
2394 self.NEWFILE_CONTENTS))
# Upload with an explicit name= field (file carries only the contents).
# NOTE(review): excerpt elides original line 2398 (binding of `fn`).
2397 def test_POST_upload_named(self):
2399 d = self.POST(self.public_url + "/foo", t="upload",
2400 name="new.txt", file=self.NEWFILE_CONTENTS)
2401 d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
2402 d.addCallback(lambda res:
2403 self.failUnlessChildContentsAre(fn, u"new.txt",
2404 self.NEWFILE_CONTENTS))
# A name= containing '/' must be rejected, and the directory must be left
# exactly as it was (no partial child added).
# NOTE(review): excerpt elides original lines 2412/2414/2420-2421 (the
# status-string argument and the tail of the expected-keys list).
2407 def test_POST_upload_named_badfilename(self):
2408 d = self.POST(self.public_url + "/foo", t="upload",
2409 name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
2410 d.addBoth(self.shouldFail, error.Error,
2411 "test_POST_upload_named_badfilename",
2413 "name= may not contain a slash",
2415 # make sure that nothing was added
2416 d.addCallback(lambda res:
2417 self.failUnlessNodeKeysAre(self._foo_node,
2418 [u"bar.txt", u"baz.txt", u"blockingfile",
2419 u"empty", u"n\u00fc.txt", u"quux.txt",
# t=check on a file: HTML result says "Healthy :"; when_done= redirects;
# return_to= adds a "Return to file" link; output=JSON gives machine-
# readable results with storage-index and healthy=true.
# NOTE(review): excerpt elides original lines 2426/2435-2437/2441 (inner
# def lines and shouldRedirect2 arguments); code kept verbatim.
2423 def test_POST_FILEURL_check(self):
2424 bar_url = self.public_url + "/foo/bar.txt"
2425 d = self.POST(bar_url, t="check")
2427 self.failUnlessIn("Healthy :", res)
2428 d.addCallback(_check)
2429 redir_url = "http://allmydata.org/TARGET"
2430 def _check2(statuscode, target):
2431 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2432 self.failUnlessReallyEqual(target, redir_url)
2433 d.addCallback(lambda res:
2434 self.shouldRedirect2("test_POST_FILEURL_check",
2438 when_done=redir_url))
2439 d.addCallback(lambda res:
2440 self.POST(bar_url, t="check", return_to=redir_url))
2442 self.failUnlessIn("Healthy :", res)
2443 self.failUnlessIn("Return to file", res)
2444 self.failUnlessIn(redir_url, res)
2445 d.addCallback(_check3)
2447 d.addCallback(lambda res:
2448 self.POST(bar_url, t="check", output="JSON"))
2449 def _check_json(res):
2450 data = simplejson.loads(res)
2451 self.failUnlessIn("storage-index", data)
2452 self.failUnless(data["results"]["healthy"])
2453 d.addCallback(_check_json)
# t=check&repair=true on a file: same Healthy/redirect/return_to behavior
# as plain t=check.
# NOTE(review): excerpt elides original lines 2460/2469-2470/2475 (inner
# def lines and shouldRedirect2 arguments); code kept verbatim.
2457 def test_POST_FILEURL_check_and_repair(self):
2458 bar_url = self.public_url + "/foo/bar.txt"
2459 d = self.POST(bar_url, t="check", repair="true")
2461 self.failUnlessIn("Healthy :", res)
2462 d.addCallback(_check)
2463 redir_url = "http://allmydata.org/TARGET"
2464 def _check2(statuscode, target):
2465 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2466 self.failUnlessReallyEqual(target, redir_url)
2467 d.addCallback(lambda res:
2468 self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
2471 t="check", repair="true",
2472 when_done=redir_url))
2473 d.addCallback(lambda res:
2474 self.POST(bar_url, t="check", return_to=redir_url))
2476 self.failUnlessIn("Healthy :", res)
2477 self.failUnlessIn("Return to file", res)
2478 self.failUnlessIn(redir_url, res)
2479 d.addCallback(_check3)
# t=check on a directory: mirrors the file test, but the return_to link
# text is "Return to file/directory"; output=JSON checked as well.
# NOTE(review): excerpt elides original lines 2485/2494-2496/2500 (inner
# def lines and shouldRedirect2 arguments); code kept verbatim.
2482 def test_POST_DIRURL_check(self):
2483 foo_url = self.public_url + "/foo/"
2484 d = self.POST(foo_url, t="check")
2486 self.failUnlessIn("Healthy :", res)
2487 d.addCallback(_check)
2488 redir_url = "http://allmydata.org/TARGET"
2489 def _check2(statuscode, target):
2490 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2491 self.failUnlessReallyEqual(target, redir_url)
2492 d.addCallback(lambda res:
2493 self.shouldRedirect2("test_POST_DIRURL_check",
2497 when_done=redir_url))
2498 d.addCallback(lambda res:
2499 self.POST(foo_url, t="check", return_to=redir_url))
2501 self.failUnlessIn("Healthy :", res)
2502 self.failUnlessIn("Return to file/directory", res)
2503 self.failUnlessIn(redir_url, res)
2504 d.addCallback(_check3)
2506 d.addCallback(lambda res:
2507 self.POST(foo_url, t="check", output="JSON"))
2508 def _check_json(res):
2509 data = simplejson.loads(res)
2510 self.failUnlessIn("storage-index", data)
2511 self.failUnless(data["results"]["healthy"])
2512 d.addCallback(_check_json)
# t=check&repair=true on a directory: same Healthy/redirect/return_to
# behavior as plain directory t=check.
# NOTE(review): excerpt elides original lines 2519/2528-2529/2534 (inner
# def lines and shouldRedirect2 arguments); code kept verbatim.
2516 def test_POST_DIRURL_check_and_repair(self):
2517 foo_url = self.public_url + "/foo/"
2518 d = self.POST(foo_url, t="check", repair="true")
2520 self.failUnlessIn("Healthy :", res)
2521 d.addCallback(_check)
2522 redir_url = "http://allmydata.org/TARGET"
2523 def _check2(statuscode, target):
2524 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2525 self.failUnlessReallyEqual(target, redir_url)
2526 d.addCallback(lambda res:
2527 self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
2530 t="check", repair="true",
2531 when_done=redir_url))
2532 d.addCallback(lambda res:
2533 self.POST(foo_url, t="check", return_to=redir_url))
2535 self.failUnlessIn("Healthy :", res)
2536 self.failUnlessIn("Return to file/directory", res)
2537 self.failUnlessIn(redir_url, res)
2538 d.addCallback(_check3)
# t=check works on an MDMF cap both in its plain form and with the
# ":3:131073" extension-field suffix.
# NOTE(review): excerpt elides original line 2544 (the `_check` def line).
2541 def test_POST_FILEURL_mdmf_check(self):
2542 quux_url = "/uri/%s" % urllib.quote(self._quux_txt_uri)
2543 d = self.POST(quux_url, t="check")
2545 self.failUnlessIn("Healthy", res)
2546 d.addCallback(_check)
2547 quux_extension_url = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
2548 d.addCallback(lambda ignored:
2549 self.POST(quux_extension_url, t="check"))
2550 d.addCallback(_check)
# t=check&repair=true works on an MDMF cap, plain and with the extension
# suffix.
# NOTE(review): excerpt elides original line 2556 (the `_check` def line).
2553 def test_POST_FILEURL_mdmf_check_and_repair(self):
2554 quux_url = "/uri/%s" % urllib.quote(self._quux_txt_uri)
2555 d = self.POST(quux_url, t="check", repair="true")
2557 self.failUnlessIn("Healthy", res)
2558 d.addCallback(_check)
2559 quux_extension_url = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
2560 d.addCallback(lambda ignored:
2561 self.POST(quux_extension_url, t="check", repair="true"))
2562 d.addCallback(_check)
# Poll /operations/<ophandle>?t=status&output=JSON once per second until
# the operation reports finished; resolves to the parsed status dict.
# NOTE(review): excerpt elides original lines 2568-2569 and 2574+ (the GET
# call, inner def line, and returns); code kept verbatim.
2565 def wait_for_operation(self, ignored, ophandle):
2566 url = "/operations/" + ophandle
2567 url += "?t=status&output=JSON"
2570 data = simplejson.loads(res)
2571 if not data["finished"]:
2572 d = self.stall(delay=1.0)
2573 d.addCallback(self.wait_for_operation, ophandle)
# Fetch /operations/<ophandle> results; with output="json" the body is
# parsed before being returned.
# NOTE(review): excerpt elides original lines 2581-2582/2584-2585/2588+
# (URL assembly, the GET, and the plain-text return); code kept verbatim.
2579 def get_operation_results(self, ignored, ophandle, output=None):
2580 url = "/operations/" + ophandle
2583 url += "&output=" + output
2586 if output and output.lower() == "json":
2587 return simplejson.loads(res)
# t=start-deep-check without ophandle= must fail ("slow operation requires
# ophandle=").
# NOTE(review): excerpt elides original line 2595 (the status argument).
2592 def test_POST_DIRURL_deepcheck_no_ophandle(self):
2593 d = self.shouldFail2(error.Error,
2594 "test_POST_DIRURL_deepcheck_no_ophandle",
2596 "slow operation requires ophandle=",
2597 self.POST, self.public_url, t="start-deep-check")
# Deep-check workflow: start-deep-check redirects to /operations/123, poll
# to completion, check JSON counters (10 objects checked/healthy), the HTML
# report (with and without trailing slash), a 404 for a bogus per-SI page,
# and the per-storage-index JSON detail page.
# NOTE(review): excerpt elides original lines 2618/2622/2627/2637 (blank or
# continuation lines) and the trailing return; code kept verbatim.
2600 def test_POST_DIRURL_deepcheck(self):
2601 def _check_redirect(statuscode, target):
2602 self.failUnlessReallyEqual(statuscode, str(http.FOUND))
2603 self.failUnless(target.endswith("/operations/123"))
2604 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
2605 self.POST, self.public_url,
2606 t="start-deep-check", ophandle="123")
2607 d.addCallback(self.wait_for_operation, "123")
2608 def _check_json(data):
2609 self.failUnlessReallyEqual(data["finished"], True)
2610 self.failUnlessReallyEqual(data["count-objects-checked"], 10)
2611 self.failUnlessReallyEqual(data["count-objects-healthy"], 10)
2612 d.addCallback(_check_json)
2613 d.addCallback(self.get_operation_results, "123", "html")
2614 def _check_html(res):
2615 self.failUnlessIn("Objects Checked: <span>10</span>", res)
2616 self.failUnlessIn("Objects Healthy: <span>10</span>", res)
2617 d.addCallback(_check_html)
2619 d.addCallback(lambda res:
2620 self.GET("/operations/123/"))
2621 d.addCallback(_check_html) # should be the same as without the slash
2623 d.addCallback(lambda res:
2624 self.shouldFail2(error.Error, "one", "404 Not Found",
2625 "No detailed results for SI bogus",
2626 self.GET, "/operations/123/bogus"))
2628 foo_si = self._foo_node.get_storage_index()
2629 foo_si_s = base32.b2a(foo_si)
2630 d.addCallback(lambda res:
2631 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
2632 def _check_foo_json(res):
2633 data = simplejson.loads(res)
2634 self.failUnlessEqual(data["storage-index"], foo_si_s)
2635 self.failUnless(data["results"]["healthy"])
2636 d.addCallback(_check_foo_json)
# Deep-check-and-repair: all 10 objects healthy before and after, zero
# repairs attempted; verify both the JSON counters and the HTML report.
# NOTE(review): excerpt elides original lines 2659/2663/2667 (blank lines)
# and the trailing return; code kept verbatim.
2639 def test_POST_DIRURL_deepcheck_and_repair(self):
2640 d = self.POST(self.public_url, t="start-deep-check", repair="true",
2641 ophandle="124", output="json", followRedirect=True)
2642 d.addCallback(self.wait_for_operation, "124")
2643 def _check_json(data):
2644 self.failUnlessReallyEqual(data["finished"], True)
2645 self.failUnlessReallyEqual(data["count-objects-checked"], 10)
2646 self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 10)
2647 self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
2648 self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
2649 self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
2650 self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
2651 self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
2652 self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 10)
2653 self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
2654 self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
2655 d.addCallback(_check_json)
2656 d.addCallback(self.get_operation_results, "124", "html")
2657 def _check_html(res):
2658 self.failUnlessIn("Objects Checked: <span>10</span>", res)
2660 self.failUnlessIn("Objects Healthy (before repair): <span>10</span>", res)
2661 self.failUnlessIn("Objects Unhealthy (before repair): <span>0</span>", res)
2662 self.failUnlessIn("Corrupt Shares (before repair): <span>0</span>", res)
2664 self.failUnlessIn("Repairs Attempted: <span>0</span>", res)
2665 self.failUnlessIn("Repairs Successful: <span>0</span>", res)
2666 self.failUnlessIn("Repairs Unsuccessful: <span>0</span>", res)
2668 self.failUnlessIn("Objects Healthy (after repair): <span>10</span>", res)
2669 self.failUnlessIn("Objects Unhealthy (after repair): <span>0</span>", res)
2670 self.failUnlessIn("Corrupt Shares (after repair): <span>0</span>", res)
2671 d.addCallback(_check_html)
# POSTing an unrecognized t= to a file URL must 400 with "bad t=bogus".
# NOTE(review): excerpt elides original line 2678 (the t="bogus" kwarg).
2674 def test_POST_FILEURL_bad_t(self):
2675 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
2676 "POST to file: bad t=bogus",
2677 self.POST, self.public_url + "/foo/bar.txt",
# POST t=mkdir name=newdir creates an empty child directory.
2681 def test_POST_mkdir(self): # return value?
2682 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
2683 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2684 d.addCallback(self.failUnlessNodeKeysAre, [])
# t=mkdir with format=mdmf yields an MDMF directory node.
2687 def test_POST_mkdir_mdmf(self):
2688 d = self.POST(self.public_url + "/foo?t=mkdir&name=newdir&format=mdmf")
2689 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2690 d.addCallback(lambda node:
2691 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
# t=mkdir with format=sdmf yields an SDMF directory node.
2694 def test_POST_mkdir_sdmf(self):
2695 d = self.POST(self.public_url + "/foo?t=mkdir&name=newdir&format=sdmf")
2696 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2697 d.addCallback(lambda node:
2698 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
def test_POST_mkdir_bad_format(self):
    """POST t=mkdir with an unrecognized format= must yield 400 Bad Request.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    """
    return self.shouldHTTPError("POST_mkdir_bad_format",
                                400, "Bad Request", "Unknown format: foo",
                                self.POST, self.public_url +
                                "/foo?t=mkdir&name=newdir&format=foo")
# t=mkdir-with-children: JSON body supplies the initial children; verify the
# new dir exists, has exactly those keys, and child-imm carries filecap1 (RO).
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2707 def test_POST_mkdir_initial_children(self):
2708 (newkids, caps) = self._create_initial_children()
2709 d = self.POST2(self.public_url +
2710 "/foo?t=mkdir-with-children&name=newdir",
2711 simplejson.dumps(newkids))
2712 d.addCallback(lambda res:
2713 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2714 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2715 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2716 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2717 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
# Same as test_POST_mkdir_initial_children but format=mdmf: the created dir's
# node must report MDMF_VERSION.
# NOTE(review): extract truncated (final cap argument and return missing) -- confirm upstream.
2720 def test_POST_mkdir_initial_children_mdmf(self):
2721 (newkids, caps) = self._create_initial_children()
2722 d = self.POST2(self.public_url +
2723 "/foo?t=mkdir-with-children&name=newdir&format=mdmf",
2724 simplejson.dumps(newkids))
2725 d.addCallback(lambda res:
2726 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2727 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2728 d.addCallback(lambda node:
2729 self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
2730 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2731 d.addCallback(self.failUnlessROChildURIIs, u"child-imm",
# Same as test_POST_mkdir_initial_children but format=sdmf: the created dir's
# node must report SDMF_VERSION.
# NOTE(review): extract truncated (final cap argument and return missing) -- confirm upstream.
2736 def test_POST_mkdir_initial_children_sdmf(self):
2737 (newkids, caps) = self._create_initial_children()
2738 d = self.POST2(self.public_url +
2739 "/foo?t=mkdir-with-children&name=newdir&format=sdmf",
2740 simplejson.dumps(newkids))
2741 d.addCallback(lambda res:
2742 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2743 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2744 d.addCallback(lambda node:
2745 self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
2746 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2747 d.addCallback(self.failUnlessROChildURIIs, u"child-imm",
def test_POST_mkdir_initial_children_bad_format(self):
    """t=mkdir-with-children with a bogus format= must yield 400 Bad Request.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    """
    (newkids, caps) = self._create_initial_children()
    return self.shouldHTTPError("POST_mkdir_initial_children_bad_format",
                                400, "Bad Request", "Unknown format: foo",
                                self.POST, self.public_url +
                                "/foo?t=mkdir-with-children&name=newdir&format=foo",
                                simplejson.dumps(newkids))
# t=mkdir-immutable with an all-immutable children body: verify the new dir
# exists, has exactly those keys, and each child resolves to the expected RO cap.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2759 def test_POST_mkdir_immutable(self):
2760 (newkids, caps) = self._create_immutable_children()
2761 d = self.POST2(self.public_url +
2762 "/foo?t=mkdir-immutable&name=newdir",
2763 simplejson.dumps(newkids))
2764 d.addCallback(lambda res:
2765 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2766 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2767 d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
2768 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2769 d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
2770 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2771 d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
2772 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2773 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
2774 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2775 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
2776 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2777 d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
# t=mkdir-immutable with children that include mutable caps must be rejected
# ("needed to be immutable but was not").
# NOTE(review): extract missing interior lines (status string, self.POST2 line, return) -- confirm upstream.
2780 def test_POST_mkdir_immutable_bad(self):
2781 (newkids, caps) = self._create_initial_children()
2782 d = self.shouldFail2(error.Error, "POST_mkdir_immutable_bad",
2784 "needed to be immutable but was not",
2787 "/foo?t=mkdir-immutable&name=newdir",
2788 simplejson.dumps(newkids))
# mkdir via POST to the child path itself (/foo/newdir?t=mkdir) with empty body.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2791 def test_POST_mkdir_2(self):
2792 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
2793 d.addCallback(lambda res:
2794 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
2795 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
2796 d.addCallback(self.failUnlessNodeKeysAre, [])
# mkdir of a nested path (/foo/bardir/newdir) must create the intermediate
# "bardir" directory as well.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2799 def test_POST_mkdirs_2(self):
2800 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
2801 d.addCallback(lambda res:
2802 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
2803 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
2804 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
2805 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST /uri?t=mkdir with no redirect: response body must parse as a DIR2 writecap.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2808 def test_POST_mkdir_no_parentdir_noredirect(self):
2809 d = self.POST("/uri?t=mkdir")
2810 def _after_mkdir(res):
2811 uri.DirectoryURI.init_from_string(res)
2812 d.addCallback(_after_mkdir)
# POST /uri?t=mkdir&format=mdmf: response must be an MDMF directory writecap.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2815 def test_POST_mkdir_no_parentdir_noredirect_mdmf(self):
2816 d = self.POST("/uri?t=mkdir&format=mdmf")
2817 def _after_mkdir(res):
2818 u = uri.from_string(res)
2819 # Check that this is an MDMF writecap
2820 self.failUnlessIsInstance(u, uri.MDMFDirectoryURI)
2821 d.addCallback(_after_mkdir)
# POST /uri?t=mkdir&format=sdmf: response must be a plain (SDMF) DirectoryURI.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2824 def test_POST_mkdir_no_parentdir_noredirect_sdmf(self):
2825 d = self.POST("/uri?t=mkdir&format=sdmf")
2826 def _after_mkdir(res):
2827 u = uri.from_string(res)
2828 self.failUnlessIsInstance(u, uri.DirectoryURI)
2829 d.addCallback(_after_mkdir)
def test_POST_mkdir_no_parentdir_noredirect_bad_format(self):
    """POST /uri?t=mkdir with a bogus format= must yield 400 Bad Request.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    """
    return self.shouldHTTPError("POST_mkdir_no_parentdir_noredirect_bad_format",
                                400, "Bad Request", "Unknown format: foo",
                                self.POST, self.public_url +
                                "/uri?t=mkdir&format=foo")
# Same as noredirect, but passing t=mkdir as a form field (welcome-page style)
# instead of a query argument.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2838 def test_POST_mkdir_no_parentdir_noredirect2(self):
2839 # make sure form-based arguments (as on the welcome page) still work
2840 d = self.POST("/uri", t="mkdir")
2841 def _after_mkdir(res):
2842 uri.DirectoryURI.init_from_string(res)
2843 d.addCallback(_after_mkdir)
2844 d.addErrback(self.explain_web_error)
# redirect_to_result=true: mkdir must answer with a 303 redirect whose
# (unquoted) target starts with "uri/URI:DIR2:".
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2847 def test_POST_mkdir_no_parentdir_redirect(self):
2848 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
2849 d.addBoth(self.shouldRedirect, None, statuscode='303')
2850 def _check_target(target):
2851 target = urllib.unquote(target)
2852 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2853 d.addCallback(_check_target)
# Same 303-redirect check, but with t/redirect_to_result as form fields.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
2856 def test_POST_mkdir_no_parentdir_redirect2(self):
2857 d = self.POST("/uri", t="mkdir", redirect_to_result="true")
2858 d.addBoth(self.shouldRedirect, None, statuscode='303')
2859 def _check_target(target):
2860 target = urllib.unquote(target)
2861 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
2862 d.addCallback(_check_target)
2863 d.addErrback(self.explain_web_error)
# Helper: return the read-only form of cap string u.
# NOTE(review): two interior lines missing from this extract -- confirm upstream.
2866 def _make_readonly(self, u):
2867 ro_uri = uri.from_string(u).get_readonly()
2870 return ro_uri.to_string()
# Helper: build a mixed children-spec dict (immutable/mutable/unknown/dir
# entries) for t=mkdir-with-children tests, plus a caps dict for later checks.
# NOTE(review): extract missing some lines (e.g. the dict-closing braces and
# some caps entries such as 'dircap'/'mdmfcap') -- confirm upstream.
2872 def _create_initial_children(self):
2873 contents, n, filecap1 = self.makefile(12)
2874 md1 = {"metakey1": "metavalue1"}
2875 filecap2 = make_mutable_file_uri()
2876 node3 = self.s.create_node_from_uri(make_mutable_file_uri())
2877 filecap3 = node3.get_readonly_uri()
2878 node4 = self.s.create_node_from_uri(make_mutable_file_uri())
2879 dircap = DirectoryNode(node4, None, None).get_uri()
2880 mdmfcap = make_mutable_file_uri(mdmf=True)
2881 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2882 emptydircap = "URI:DIR2-LIT:"
2883 newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
2884 "ro_uri": self._make_readonly(filecap1),
2885 "metadata": md1, }],
2886 u"child-mutable": ["filenode", {"rw_uri": filecap2,
2887 "ro_uri": self._make_readonly(filecap2)}],
2888 u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
2889 u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
2890 "ro_uri": unknown_rocap}],
2891 u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
2892 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2893 u"dirchild": ["dirnode", {"rw_uri": dircap,
2894 "ro_uri": self._make_readonly(dircap)}],
2895 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2896 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2897 u"child-mutable-mdmf": ["filenode", {"rw_uri": mdmfcap,
2898 "ro_uri": self._make_readonly(mdmfcap)}],
2900 return newkids, {'filecap1': filecap1,
2901 'filecap2': filecap2,
2902 'filecap3': filecap3,
2903 'unknown_rwcap': unknown_rwcap,
2904 'unknown_rocap': unknown_rocap,
2905 'unknown_immcap': unknown_immcap,
2907 'litdircap': litdircap,
2908 'emptydircap': emptydircap,
# Helper: build an all-immutable children-spec dict (CHK file, unknown-imm,
# immutable dir, literal dirs) for t=mkdir-immutable tests, plus a caps dict.
# NOTE(review): one line missing between the newkids dict and the return
# (probably the dict's closing brace) -- confirm upstream.
2911 def _create_immutable_children(self):
2912 contents, n, filecap1 = self.makefile(12)
2913 md1 = {"metakey1": "metavalue1"}
2914 tnode = create_chk_filenode("immutable directory contents\n"*10)
2915 dnode = DirectoryNode(tnode, None, None)
2916 assert not dnode.is_mutable()
2917 immdircap = dnode.get_uri()
2918 litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
2919 emptydircap = "URI:DIR2-LIT:"
2920 newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
2921 "metadata": md1, }],
2922 u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
2923 u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
2924 u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
2925 u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
2927 return newkids, {'filecap1': filecap1,
2928 'unknown_immcap': unknown_immcap,
2929 'immdircap': immdircap,
2930 'litdircap': litdircap,
2931 'emptydircap': emptydircap}
# POST /uri?t=mkdir-with-children: returned cap must be a DIR writecap; the new
# node must contain exactly the requested children with the expected caps.
# NOTE(review): several continuation lines (cap arguments) and the trailing
# return are missing from this extract -- confirm upstream.
2933 def test_POST_mkdir_no_parentdir_initial_children(self):
2934 (newkids, caps) = self._create_initial_children()
2935 d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
2936 def _after_mkdir(res):
2937 self.failUnless(res.startswith("URI:DIR"), res)
2938 n = self.s.create_node_from_uri(res)
2939 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2940 d2.addCallback(lambda ign:
2941 self.failUnlessROChildURIIs(n, u"child-imm",
2943 d2.addCallback(lambda ign:
2944 self.failUnlessRWChildURIIs(n, u"child-mutable",
2946 d2.addCallback(lambda ign:
2947 self.failUnlessROChildURIIs(n, u"child-mutable-ro",
2949 d2.addCallback(lambda ign:
2950 self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
2951 caps['unknown_rwcap']))
2952 d2.addCallback(lambda ign:
2953 self.failUnlessROChildURIIs(n, u"unknownchild-ro",
2954 caps['unknown_rocap']))
2955 d2.addCallback(lambda ign:
2956 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2957 caps['unknown_immcap']))
2958 d2.addCallback(lambda ign:
2959 self.failUnlessRWChildURIIs(n, u"dirchild",
2962 d.addCallback(_after_mkdir)
# Plain t=mkdir must reject a children= body; only t=mkdir-with-children uses it.
# NOTE(review): the expected HTTP status line and trailing return are missing
# from this extract -- confirm upstream.
2965 def test_POST_mkdir_no_parentdir_unexpected_children(self):
2966 # the regular /uri?t=mkdir operation is specified to ignore its body.
2967 # Only t=mkdir-with-children pays attention to it.
2968 (newkids, caps) = self._create_initial_children()
2969 d = self.shouldHTTPError("POST_mkdir_no_parentdir_unexpected_children",
2971 "t=mkdir does not accept children=, "
2972 "try t=mkdir-with-children instead",
2973 self.POST2, "/uri?t=mkdir", # without children
2974 simplejson.dumps(newkids))
# POST /uri?t=bogus must be rejected; /uri only accepts the listed verbs.
# NOTE(review): the expected HTTP status line and trailing return are missing
# from this extract -- confirm upstream.
2977 def test_POST_noparent_bad(self):
2978 d = self.shouldHTTPError("POST_noparent_bad",
2980 "/uri accepts only PUT, PUT?t=mkdir, "
2981 "POST?t=upload, and POST?t=mkdir",
2982 self.POST, "/uri?t=bogus")
# POST /uri?t=mkdir-immutable: returned cap is a DIR cap; new node must hold
# exactly the immutable children with the expected RO caps.
# NOTE(review): several continuation lines (cap arguments) and the trailing
# return are missing from this extract -- confirm upstream.
2985 def test_POST_mkdir_no_parentdir_immutable(self):
2986 (newkids, caps) = self._create_immutable_children()
2987 d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
2988 def _after_mkdir(res):
2989 self.failUnless(res.startswith("URI:DIR"), res)
2990 n = self.s.create_node_from_uri(res)
2991 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
2992 d2.addCallback(lambda ign:
2993 self.failUnlessROChildURIIs(n, u"child-imm",
2995 d2.addCallback(lambda ign:
2996 self.failUnlessROChildURIIs(n, u"unknownchild-imm",
2997 caps['unknown_immcap']))
2998 d2.addCallback(lambda ign:
2999 self.failUnlessROChildURIIs(n, u"dirchild-imm",
3001 d2.addCallback(lambda ign:
3002 self.failUnlessROChildURIIs(n, u"dirchild-lit",
3004 d2.addCallback(lambda ign:
3005 self.failUnlessROChildURIIs(n, u"dirchild-empty",
3006 caps['emptydircap']))
3008 d.addCallback(_after_mkdir)
# t=mkdir-immutable with mutable children in the body must be rejected
# ("needed to be immutable but was not").
# NOTE(review): status line, POST2 callable line, and return are missing from
# this extract -- confirm upstream.
3011 def test_POST_mkdir_no_parentdir_immutable_bad(self):
3012 (newkids, caps) = self._create_initial_children()
3013 d = self.shouldFail2(error.Error,
3014 "test_POST_mkdir_no_parentdir_immutable_bad",
3016 "needed to be immutable but was not",
3018 "/uri?t=mkdir-immutable",
3019 simplejson.dumps(newkids))
# Scrape the welcome page's mkdir form with a regex, then POST the scraped
# action/fields and expect a 303 redirect.
# NOTE(review): the initial GET of the page, the regex flags line, the
# mo.group(2) line, and the trailing return are missing -- confirm upstream.
3022 def test_welcome_page_mkdir_button(self):
3023 # Fetch the welcome page.
3025 def _after_get_welcome_page(res):
3026 MKDIR_BUTTON_RE = re.compile(
3027 '<form action="([^"]*)" method="post".*?'
3028 '<input type="hidden" name="t" value="([^"]*)" />'
3029 '<input type="hidden" name="([^"]*)" value="([^"]*)" />'
3030 '<input type="submit" value="Create a directory" />',
3032 mo = MKDIR_BUTTON_RE.search(res)
3033 formaction = mo.group(1)
3035 formaname = mo.group(3)
3036 formavalue = mo.group(4)
3037 return (formaction, formt, formaname, formavalue)
3038 d.addCallback(_after_get_welcome_page)
3039 def _after_parse_form(res):
3040 (formaction, formt, formaname, formavalue) = res
3041 return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
3042 d.addCallback(_after_parse_form)
3043 d.addBoth(self.shouldRedirect, None, statuscode='303')
# mkdir over an existing child name ("sub") replaces it with an empty dir.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3046 def test_POST_mkdir_replace(self): # return value?
3047 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
3048 d.addCallback(lambda res: self._foo_node.get(u"sub"))
3049 d.addCallback(self.failUnlessNodeKeysAre, [])
# mkdir with ?replace=false over an existing child must fail and leave the
# original "sub" (still containing baz.txt) untouched.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3052 def test_POST_mkdir_no_replace_queryarg(self): # return value?
3053 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
3054 d.addBoth(self.shouldFail, error.Error,
3055 "POST_mkdir_no_replace_queryarg",
3057 "There was already a child by that name, and you asked me "
3058 "to not replace it")
3059 d.addCallback(lambda res: self._foo_node.get(u"sub"))
3060 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# Same no-replace check, but replace=false passed as a form field.
# NOTE(review): the replace="false" argument line, status line, and return are
# missing from this extract -- confirm upstream.
3063 def test_POST_mkdir_no_replace_field(self): # return value?
3064 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
3066 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
3068 "There was already a child by that name, and you asked me "
3069 "to not replace it")
3070 d.addCallback(lambda res: self._foo_node.get(u"sub"))
3071 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
# when_done form field: mkdir must redirect to /THERE after creating newdir.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3074 def test_POST_mkdir_whendone_field(self):
3075 d = self.POST(self.public_url + "/foo",
3076 t="mkdir", name="newdir", when_done="/THERE")
3077 d.addBoth(self.shouldRedirect, "/THERE")
3078 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
3079 d.addCallback(self.failUnlessNodeKeysAre, [])
# Same when_done behavior, but passed as a query argument.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3082 def test_POST_mkdir_whendone_queryarg(self):
3083 d = self.POST(self.public_url + "/foo?when_done=/THERE",
3084 t="mkdir", name="newdir")
3085 d.addBoth(self.shouldRedirect, "/THERE")
3086 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
3087 d.addCallback(self.failUnlessNodeKeysAre, [])
# POST t=BOGUS to a directory must fail with a bad-t error.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3090 def test_POST_bad_t(self):
3091 d = self.shouldFail2(error.Error, "POST_bad_t",
3093 "POST to a directory with bad t=BOGUS",
3094 self.POST, self.public_url + "/foo", t="BOGUS")
# t=set_children: POST a JSON body adding three filenodes atomically, then
# verify each atomic_added_N child matches its cap. command_name is
# parameterized so the hyphenated alias can reuse this test.
# NOTE(review): the JSON request-body literal is heavily truncated in this
# extract (opening braces, ro_uri lines, metadata braces missing), as are the
# _then header and trailing return -- confirm against upstream.
3097 def test_POST_set_children(self, command_name="set_children"):
3098 contents9, n9, newuri9 = self.makefile(9)
3099 contents10, n10, newuri10 = self.makefile(10)
3100 contents11, n11, newuri11 = self.makefile(11)
3103 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
3106 "ctime": 1002777696.7564139,
3107 "mtime": 1002777696.7564139
3110 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
3113 "ctime": 1002777696.7564139,
3114 "mtime": 1002777696.7564139
3117 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
3120 "ctime": 1002777696.7564139,
3121 "mtime": 1002777696.7564139
3124 }""" % (newuri9, newuri10, newuri11)
3126 url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
3128 d = client.getPage(url, method="POST", postdata=reqbody)
3130 self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
3131 self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
3132 self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
3134 d.addCallback(_then)
3135 d.addErrback(self.dump_error)
def test_POST_set_children_with_hyphen(self):
    """t=set-children (hyphenated) must behave the same as t=set_children.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    """
    return self.test_POST_set_children(command_name="set-children")
# t=uri: link an existing cap into /foo as new.txt; verify the child's cap
# and contents.
# NOTE(review): the contents argument line and trailing return are missing -- confirm upstream.
3141 def test_POST_link_uri(self):
3142 contents, n, newuri = self.makefile(8)
3143 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
3144 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
3145 d.addCallback(lambda res:
3146 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# t=uri over an existing name (bar.txt) replaces the old link.
# NOTE(review): the contents argument line and trailing return are missing -- confirm upstream.
3150 def test_POST_link_uri_replace(self):
3151 contents, n, newuri = self.makefile(8)
3152 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
3153 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
3154 d.addCallback(lambda res:
3155 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# Linking an unknown *writecap* must be rejected ("unknown cap in a write slot").
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3159 def test_POST_link_uri_unknown_bad(self):
3160 d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
3161 d.addBoth(self.shouldFail, error.Error,
3162 "POST_link_uri_unknown_bad",
3164 "unknown cap in a write slot")
# Linking an unknown *readcap* (ro. prefix) is allowed.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3167 def test_POST_link_uri_unknown_ro_good(self):
3168 d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
3169 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
# Linking an unknown *immutable* cap is allowed.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3172 def test_POST_link_uri_unknown_imm_good(self):
3173 d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
3174 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
# t=uri with ?replace=false over existing bar.txt must fail; original file
# must still be readable.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3177 def test_POST_link_uri_no_replace_queryarg(self):
3178 contents, n, newuri = self.makefile(8)
3179 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
3180 name="bar.txt", uri=newuri)
3181 d.addBoth(self.shouldFail, error.Error,
3182 "POST_link_uri_no_replace_queryarg",
3184 "There was already a child by that name, and you asked me "
3185 "to not replace it")
3186 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
3187 d.addCallback(self.failUnlessIsBarDotTxt)
# Same no-replace check for t=uri, with replace="false" as a form field.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3190 def test_POST_link_uri_no_replace_field(self):
3191 contents, n, newuri = self.makefile(8)
3192 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
3193 name="bar.txt", uri=newuri)
3194 d.addBoth(self.shouldFail, error.Error,
3195 "POST_link_uri_no_replace_field",
3197 "There was already a child by that name, and you asked me "
3198 "to not replace it")
3199 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
3200 d.addCallback(self.failUnlessIsBarDotTxt)
# t=delete (or the alias passed in command_name): bar.txt present before,
# absent after. Parameterized so test_POST_unlink can reuse it.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3203 def test_POST_delete(self, command_name='delete'):
3204 d = self._foo_node.list()
3205 def _check_before(children):
3206 self.failUnlessIn(u"bar.txt", children)
3207 d.addCallback(_check_before)
3208 d.addCallback(lambda res: self.POST(self.public_url + "/foo", t=command_name, name="bar.txt"))
3209 d.addCallback(lambda res: self._foo_node.list())
3210 def _check_after(children):
3211 self.failIfIn(u"bar.txt", children)
3212 d.addCallback(_check_after)
def test_POST_unlink(self):
    """t=unlink must behave identically to t=delete.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    """
    return self.test_POST_delete(command_name='unlink')
# t=rename bar.txt -> wibble.txt: old name gone, new name present, contents
# and JSON metadata preserved.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3218 def test_POST_rename_file(self):
3219 d = self.POST(self.public_url + "/foo", t="rename",
3220 from_name="bar.txt", to_name='wibble.txt')
3221 d.addCallback(lambda res:
3222 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
3223 d.addCallback(lambda res:
3224 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
3225 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
3226 d.addCallback(self.failUnlessIsBarDotTxt)
3227 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
3228 d.addCallback(self.failUnlessIsBarJSON)
# Renaming a file to its own name is a no-op: child remains with same
# contents and JSON.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3231 def test_POST_rename_file_redundant(self):
3232 d = self.POST(self.public_url + "/foo", t="rename",
3233 from_name="bar.txt", to_name='bar.txt')
3234 d.addCallback(lambda res:
3235 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
3236 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
3237 d.addCallback(self.failUnlessIsBarDotTxt)
3238 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
3239 d.addCallback(self.failUnlessIsBarJSON)
# Rename bar.txt onto the existing "empty" child: the directory child is
# replaced by the file (contents are bar.txt's).
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3242 def test_POST_rename_file_replace(self):
3243 # rename a file and replace a directory with it
3244 d = self.POST(self.public_url + "/foo", t="rename",
3245 from_name="bar.txt", to_name='empty')
3246 d.addCallback(lambda res:
3247 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
3248 d.addCallback(lambda res:
3249 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
3250 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
3251 d.addCallback(self.failUnlessIsBarDotTxt)
3252 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
3253 d.addCallback(self.failUnlessIsBarJSON)
# Rename with ?replace=false onto an existing child must fail; "empty" must
# still be an empty dirnode.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3256 def test_POST_rename_file_no_replace_queryarg(self):
3257 # rename a file and replace a directory with it
3258 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
3259 from_name="bar.txt", to_name='empty')
3260 d.addBoth(self.shouldFail, error.Error,
3261 "POST_rename_file_no_replace_queryarg",
3263 "There was already a child by that name, and you asked me "
3264 "to not replace it")
3265 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
3266 d.addCallback(self.failUnlessIsEmptyJSON)
# Same no-replace rename check, with replace="false" as a form field.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3269 def test_POST_rename_file_no_replace_field(self):
3270 # rename a file and replace a directory with it
3271 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
3272 from_name="bar.txt", to_name='empty')
3273 d.addBoth(self.shouldFail, error.Error,
3274 "POST_rename_file_no_replace_field",
3276 "There was already a child by that name, and you asked me "
3277 "to not replace it")
3278 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
3279 d.addCallback(self.failUnlessIsEmptyJSON)
def failUnlessIsEmptyJSON(self, res):
    """Assert that *res* is the t=json rendering of an empty directory node.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    """
    data = simplejson.loads(res)
    # shape is ["dirnode", {"children": {...}, ...}]
    self.failUnlessEqual(data[0], "dirnode", data)
    self.failUnlessReallyEqual(len(data[1]["children"]), 0)
# to_name containing a slash must be rejected; bar.txt must survive.
# NOTE(review): status line, the argument line after the message, and trailing
# return are missing from this extract -- confirm upstream.
3287 def test_POST_rename_file_slash_fail(self):
3288 d = self.POST(self.public_url + "/foo", t="rename",
3289 from_name="bar.txt", to_name='kirk/spock.txt')
3290 d.addBoth(self.shouldFail, error.Error,
3291 "test_POST_rename_file_slash_fail",
3293 "to_name= may not contain a slash",
3295 d.addCallback(lambda res:
3296 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
# Rename the directory child foo -> plunk at the public root; the renamed
# dir's JSON must still match foo's.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3299 def test_POST_rename_dir(self):
3300 d = self.POST(self.public_url, t="rename",
3301 from_name="foo", to_name='plunk')
3302 d.addCallback(lambda res:
3303 self.failIfNodeHasChild(self.public_root, u"foo"))
3304 d.addCallback(lambda res:
3305 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
3306 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
3307 d.addCallback(self.failUnlessIsFooJSON)
def shouldRedirect(self, res, target=None, statuscode=None, which=""):
    """ If target is not None then the redirection has to go to target. If
    statuscode is not None then the redirection has to be accomplished with
    that HTTP status code.

    Fix: restored runnable form from a mangled extract (stripped stray
    line-number prefixes, restored indentation); tokens otherwise unchanged.
    Returns the redirect's Location value for chaining.
    """
    # a non-Failure here means the request succeeded instead of redirecting
    if not isinstance(res, failure.Failure):
        to_where = (target is None) and "somewhere" or ("to " + target)
        self.fail("%s: we were expecting to get redirected %s, not get an"
                  " actual page: %s" % (which, to_where, res))
    res.trap(error.PageRedirect)
    if statuscode is not None:
        self.failUnlessReallyEqual(res.value.status, statuscode,
                                   "%s: not a redirect" % which)
    if target is not None:
        # the PageRedirect does not seem to capture the uri= query arg
        # properly, so we can't check for it.
        realtarget = self.webish_url + target
        self.failUnlessReallyEqual(res.value.location, realtarget,
                                   "%s: wrong target" % which)
    return res.value.location
# GET /uri?uri=CAP must redirect to /uri/$CAP, preserving filename= and t=
# query args; following the redirect yields the file/dir contents.
# NOTE(review): the initial "d = self.GET(base)" line and trailing return are
# missing from this extract -- confirm upstream.
3330 def test_GET_URI_form(self):
3331 base = "/uri?uri=%s" % self._bar_txt_uri
3332 # this is supposed to give us a redirect to /uri/$URI, plus arguments
3333 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
3335 d.addBoth(self.shouldRedirect, targetbase)
3336 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
3337 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
3338 d.addCallback(lambda res: self.GET(base+"&t=json"))
3339 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
3340 d.addCallback(self.log, "about to get file by uri")
3341 d.addCallback(lambda res: self.GET(base, followRedirect=True))
3342 d.addCallback(self.failUnlessIsBarDotTxt)
3343 d.addCallback(self.log, "got file by uri, about to get dir by uri")
3344 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
3345 followRedirect=True))
3346 d.addCallback(self.failUnlessIsFooJSON)
3347 d.addCallback(self.log, "got dir by uri")
# GET /uri without a uri= argument must fail "400 Bad Request".
# NOTE(review): the callable argument line and trailing return are missing -- confirm upstream.
3351 def test_GET_URI_form_bad(self):
3352 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
3353 "400 Bad Request", "GET /uri requires uri=",
# t=rename-form must render a form prefilled with from_name="bar.txt" and a
# when_done="." hidden field.
# NOTE(review): the "def _check(res):" header and trailing return are missing -- confirm upstream.
3357 def test_GET_rename_form(self):
3358 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
3359 followRedirect=True)
3361 self.failUnlessIn('name="when_done" value="."', res)
3362 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
3363 d.addCallback(_check)
# Pass-through logging helper used between Deferred callbacks.
# NOTE(review): body lines (presumably the log call and "return res") are
# missing from this extract -- confirm upstream.
3366 def log(self, res, msg):
3367 #print "MSG: %s RES: %s" % (msg, res)
# GET /uri/$FILECAP returns the file; filename= and save= query args must not
# change the returned bytes.
# NOTE(review): the initial "d = self.GET(base)" line and trailing return are
# missing from this extract -- confirm upstream.
3371 def test_GET_URI_URL(self):
3372 base = "/uri/%s" % self._bar_txt_uri
3374 d.addCallback(self.failUnlessIsBarDotTxt)
3375 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
3376 d.addCallback(self.failUnlessIsBarDotTxt)
3377 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
3378 d.addCallback(self.failUnlessIsBarDotTxt)
# GET /uri/$DIRCAP?t=json returns the directory's JSON.
# NOTE(review): the GET line and trailing return are missing from this extract -- confirm upstream.
3381 def test_GET_URI_URL_dir(self):
3382 base = "/uri/%s?t=json" % self._foo_uri
3384 d.addCallback(self.failUnlessIsFooJSON)
# GET of a cap with no recoverable shares must yield http.GONE with a
# NotEnoughSharesError explanation.
# NOTE(review): the callable argument line and trailing return are missing -- confirm upstream.
3387 def test_GET_URI_URL_missing(self):
3388 base = "/uri/%s" % self._bad_file_uri
3389 d = self.shouldHTTPError("test_GET_URI_URL_missing",
3390 http.GONE, None, "NotEnoughSharesError",
3392 # TODO: how can we exercise both sides of WebDownloadTarget.fail
3393 # here? we must arrange for a download to fail after target.open()
3394 # has been called, and then inspect the response to see that it is
3395 # shorter than we expected.
# PUT /foo?t=uri with a fresh dircap replaces the /foo link; response echoes
# the new cap and the root's "foo" child now carries it.
# NOTE(review): the "def _made_dir(dn):" header, cap-argument lines, and
# trailing return are missing from this extract -- confirm upstream.
3398 def test_PUT_DIRURL_uri(self):
3399 d = self.s.create_dirnode()
3401 new_uri = dn.get_uri()
3402 # replace /foo with a new (empty) directory
3403 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
3404 d.addCallback(lambda res:
3405 self.failUnlessReallyEqual(res.strip(), new_uri))
3406 d.addCallback(lambda res:
3407 self.failUnlessRWChildURIIs(self.public_root,
3411 d.addCallback(_made_dir)
# PUT t=uri with replace=false over the existing /foo must 409; the root's
# "foo" child must keep its original cap.
# NOTE(review): the "def _made_dir(dn):" header, self.PUT callable line,
# cap-argument lines, and trailing return are missing -- confirm upstream.
3414 def test_PUT_DIRURL_uri_noreplace(self):
3415 d = self.s.create_dirnode()
3417 new_uri = dn.get_uri()
3418 # replace /foo with a new (empty) directory, but ask that
3419 # replace=false, so it should fail
3420 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
3421 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
3423 self.public_url + "/foo?t=uri&replace=false",
3425 d.addCallback(lambda res:
3426 self.failUnlessRWChildURIIs(self.public_root,
3430 d.addCallback(_made_dir)
# PUT t=BOGUS to a directory must 400; /foo must keep its original cap.
# NOTE(review): the cap-argument lines and trailing return are missing -- confirm upstream.
3433 def test_PUT_DIRURL_bad_t(self):
3434 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
3435 "400 Bad Request", "PUT to a directory",
3436 self.PUT, self.public_url + "/foo?t=BOGUS", "")
3437 d.addCallback(lambda res:
3438 self.failUnlessRWChildURIIs(self.public_root,
# PUT t=uri at a new child path links the given cap as new.txt; response
# echoes the cap and the child's contents match.
# NOTE(review): the contents argument line and trailing return are missing -- confirm upstream.
3443 def test_PUT_NEWFILEURL_uri(self):
3444 contents, n, new_uri = self.makefile(8)
3445 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
3446 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
3447 d.addCallback(lambda res:
3448 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
# PUT ?format=mdmf creates a mutable file; its t=json must report format MDMF
# and URI:MDMF caps for both rw_uri and ro_uri.
# NOTE(review): the PUT body argument line, the data[...] unpacking line, and
# trailing return are missing from this extract -- confirm upstream.
3452 def test_PUT_NEWFILEURL_mdmf(self):
3453 new_contents = self.NEWFILE_CONTENTS * 300000
3454 d = self.PUT(self.public_url + \
3455 "/foo/mdmf.txt?format=mdmf",
3457 d.addCallback(lambda ignored:
3458 self.GET(self.public_url + "/foo/mdmf.txt?t=json"))
3459 def _got_json(json):
3460 data = simplejson.loads(json)
3462 self.failUnlessIn("format", data)
3463 self.failUnlessEqual(data["format"], "MDMF")
3464 self.failUnless(data['rw_uri'].startswith("URI:MDMF"))
3465 self.failUnless(data['ro_uri'].startswith("URI:MDMF"))
3466 d.addCallback(_got_json)
# PUT ?format=sdmf creates a mutable file; its t=json must report format SDMF.
# NOTE(review): the PUT body argument line, the data[...] unpacking line, and
# trailing return are missing from this extract -- confirm upstream.
3469 def test_PUT_NEWFILEURL_sdmf(self):
3470 new_contents = self.NEWFILE_CONTENTS * 300000
3471 d = self.PUT(self.public_url + \
3472 "/foo/sdmf.txt?format=sdmf",
3474 d.addCallback(lambda ignored:
3475 self.GET(self.public_url + "/foo/sdmf.txt?t=json"))
3476 def _got_json(json):
3477 data = simplejson.loads(json)
3479 self.failUnlessIn("format", data)
3480 self.failUnlessEqual(data["format"], "SDMF")
3481 d.addCallback(_got_json)
# PUT ?format=foo must yield 400 "Unknown format: foo".
# NOTE(review): the final body argument line is missing from this extract -- confirm upstream.
3484 def test_PUT_NEWFILEURL_bad_format(self):
3485 new_contents = self.NEWFILE_CONTENTS * 300000
3486 return self.shouldHTTPError("PUT_NEWFILEURL_bad_format",
3487 400, "Bad Request", "Unknown format: foo",
3488 self.PUT, self.public_url + \
3489 "/foo/foo.txt?format=foo",
# PUT t=uri over existing bar.txt replaces the link; contents become the new cap's.
# NOTE(review): the contents argument line and trailing return are missing -- confirm upstream.
3492 def test_PUT_NEWFILEURL_uri_replace(self):
3493 contents, n, new_uri = self.makefile(8)
3494 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
3495 d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
3496 d.addCallback(lambda res:
3497 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
# PUT t=uri&replace=false over existing bar.txt must fail with the
# already-a-child error.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3501 def test_PUT_NEWFILEURL_uri_no_replace(self):
3502 contents, n, new_uri = self.makefile(8)
3503 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
3504 d.addBoth(self.shouldFail, error.Error,
3505 "PUT_NEWFILEURL_uri_no_replace",
3507 "There was already a child by that name, and you asked me "
3508 "to not replace it")
# PUT t=uri with an unknown *writecap* must be rejected ("unknown cap in a
# write slot").
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3511 def test_PUT_NEWFILEURL_uri_unknown_bad(self):
3512 d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
3513 d.addBoth(self.shouldFail, error.Error,
3514 "POST_put_uri_unknown_bad",
3516 "unknown cap in a write slot")
# PUT t=uri with an unknown readcap (ro. prefix) is allowed.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3519 def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
3520 d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
3521 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
3522 u"put-future-ro.txt")
# PUT t=uri with an unknown immutable cap is allowed.
# NOTE(review): trailing "return d" not visible in this extract -- confirm upstream.
3525 def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
3526 d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
3527 d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
3528 u"put-future-imm.txt")
# Unlinked PUT /uri: response is a CHK cap, the fake grid stores the bytes,
# and GET /uri/$CAP returns them.
# NOTE(review): the _check/_check2 def headers, a contents-comparison line,
# and the trailing return are missing from this extract -- confirm upstream.
3531 def test_PUT_NEWFILE_URI(self):
3532 file_contents = "New file contents here\n"
3533 d = self.PUT("/uri", file_contents)
3535 assert isinstance(uri, str), uri
3536 self.failUnlessIn(uri, FakeCHKFileNode.all_contents)
3537 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
3539 return self.GET("/uri/%s" % uri)
3540 d.addCallback(_check)
3542 self.failUnlessReallyEqual(res, file_contents)
3543 d.addCallback(_check2)
# Unlinked PUT /uri?mutable=false behaves like the plain unlinked PUT
# (immutable CHK file).
# NOTE(review): the _check/_check2 def headers, a contents-comparison line,
# and the trailing return are missing from this extract -- confirm upstream.
3546 def test_PUT_NEWFILE_URI_not_mutable(self):
3547 file_contents = "New file contents here\n"
3548 d = self.PUT("/uri?mutable=false", file_contents)
3550 assert isinstance(uri, str), uri
3551 self.failUnlessIn(uri, FakeCHKFileNode.all_contents)
3552 self.failUnlessReallyEqual(FakeCHKFileNode.all_contents[uri],
3554 return self.GET("/uri/%s" % uri)
3555 d.addCallback(_check)
3557 self.failUnlessReallyEqual(res, file_contents)
3558 d.addCallback(_check2)
# PUT /uri?t=bogus must be rejected with the allowed-verbs message.
# NOTE(review): the expected status line and trailing return are missing -- confirm upstream.
3561 def test_PUT_NEWFILE_URI_only_PUT(self):
3562 d = self.PUT("/uri?t=bogus", "")
3563 d.addBoth(self.shouldFail, error.Error,
3564 "PUT_NEWFILE_URI_only_PUT",
3566 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
# Unlinked PUT /uri?mutable=true: response is an SSK writecap; the mutable
# node's best version and GET /uri/$CAP both return the uploaded bytes.
# NOTE(review): the _check2/_check3 def headers and trailing return are
# missing from this extract -- confirm upstream.
3569 def test_PUT_NEWFILE_URI_mutable(self):
3570 file_contents = "New file contents here\n"
3571 d = self.PUT("/uri?mutable=true", file_contents)
3572 def _check1(filecap):
3573 filecap = filecap.strip()
3574 self.failUnless(filecap.startswith("URI:SSK:"), filecap)
3575 self.filecap = filecap
3576 u = uri.WriteableSSKFileURI.init_from_string(filecap)
3577 self.failUnlessIn(u.get_storage_index(), FakeMutableFileNode.all_contents)
3578 n = self.s.create_node_from_uri(filecap)
3579 return n.download_best_version()
3580 d.addCallback(_check1)
3582 self.failUnlessReallyEqual(data, file_contents)
3583 return self.GET("/uri/%s" % urllib.quote(self.filecap))
3584 d.addCallback(_check2)
3586 self.failUnlessReallyEqual(res, file_contents)
3587 d.addCallback(_check3)
3590 def test_PUT_mkdir(self):
3591 d = self.PUT("/uri?t=mkdir", "")
3593 n = self.s.create_node_from_uri(uri.strip())
3594 d2 = self.failUnlessNodeKeysAre(n, [])
3595 d2.addCallback(lambda res:
3596 self.GET("/uri/%s?t=json" % uri))
3598 d.addCallback(_check)
3599 d.addCallback(self.failUnlessIsEmptyJSON)
3602 def test_PUT_mkdir_mdmf(self):
3603 d = self.PUT("/uri?t=mkdir&format=mdmf", "")
3605 u = uri.from_string(res)
3606 # Check that this is an MDMF writecap
3607 self.failUnlessIsInstance(u, uri.MDMFDirectoryURI)
3611 def test_PUT_mkdir_sdmf(self):
3612 d = self.PUT("/uri?t=mkdir&format=sdmf", "")
3614 u = uri.from_string(res)
3615 self.failUnlessIsInstance(u, uri.DirectoryURI)
3619 def test_PUT_mkdir_bad_format(self):
3620 return self.shouldHTTPError("PUT_mkdir_bad_format",
3621 400, "Bad Request", "Unknown format: foo",
3622 self.PUT, "/uri?t=mkdir&format=foo",
3625 def test_POST_check(self):
3626 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
3628 # this returns a string form of the results, which are probably
3629 # None since we're using fake filenodes.
3630 # TODO: verify that the check actually happened, by changing
3631 # FakeCHKFileNode to count how many times .check() has been
3634 d.addCallback(_done)
3638 def test_PUT_update_at_offset(self):
3639 file_contents = "test file" * 100000 # about 900 KiB
3640 d = self.PUT("/uri?mutable=true", file_contents)
3642 self.filecap = filecap
3643 new_data = file_contents[:100]
3644 new = "replaced and so on"
3646 new_data += file_contents[len(new_data):]
3647 assert len(new_data) == len(file_contents)
3648 self.new_data = new_data
3649 d.addCallback(_then)
3650 d.addCallback(lambda ignored:
3651 self.PUT("/uri/%s?replace=True&offset=100" % self.filecap,
3652 "replaced and so on"))
3653 def _get_data(filecap):
3654 n = self.s.create_node_from_uri(filecap)
3655 return n.download_best_version()
3656 d.addCallback(_get_data)
3657 d.addCallback(lambda results:
3658 self.failUnlessEqual(results, self.new_data))
3659 # Now try appending things to the file
3660 d.addCallback(lambda ignored:
3661 self.PUT("/uri/%s?offset=%d" % (self.filecap, len(self.new_data)),
3663 d.addCallback(_get_data)
3664 d.addCallback(lambda results:
3665 self.failUnlessEqual(results, self.new_data + ("puppies" * 100)))
3666 # and try replacing the beginning of the file
3667 d.addCallback(lambda ignored:
3668 self.PUT("/uri/%s?offset=0" % self.filecap, "begin"))
3669 d.addCallback(_get_data)
3670 d.addCallback(lambda results:
3671 self.failUnlessEqual(results, "begin"+self.new_data[len("begin"):]+("puppies"*100)))
3674 def test_PUT_update_at_invalid_offset(self):
3675 file_contents = "test file" * 100000 # about 900 KiB
3676 d = self.PUT("/uri?mutable=true", file_contents)
3678 self.filecap = filecap
3679 d.addCallback(_then)
3680 # Negative offsets should cause an error.
3681 d.addCallback(lambda ignored:
3682 self.shouldHTTPError("PUT_update_at_invalid_offset",
3686 "/uri/%s?offset=-1" % self.filecap,
3690 def test_PUT_update_at_offset_immutable(self):
3691 file_contents = "Test file" * 100000
3692 d = self.PUT("/uri", file_contents)
3694 self.filecap = filecap
3695 d.addCallback(_then)
3696 d.addCallback(lambda ignored:
3697 self.shouldHTTPError("PUT_update_at_offset_immutable",
3701 "/uri/%s?offset=50" % self.filecap,
def test_bad_method(self):
    # An unsupported HTTP verb against a file URL must yield
    # 501 Not Implemented with an explanatory body.
    url = self.webish_url + self.public_url + "/foo/bar.txt"
    return self.shouldHTTPError("bad_method",
                                501, "Not Implemented",
                                "I don't know how to treat a BOGUS request.",
                                client.getPage, url, method="BOGUS")
def test_short_url(self):
    # The bare /uri root does not accept DELETE; expect 501.
    url = self.webish_url + "/uri"
    return self.shouldHTTPError("short_url", 501, "Not Implemented",
                                "I don't know how to treat a DELETE request.",
                                client.getPage, url, method="DELETE")
def test_ophandle_bad(self):
    # Asking for the status of an operation handle that was never
    # created must produce a 404.
    url = self.webish_url + "/operations/bogus?t=status"
    return self.shouldHTTPError("ophandle_bad", 404, "404 Not Found",
                                "unknown/expired handle 'bogus'",
                                client.getPage, url)
3728 def test_ophandle_cancel(self):
3729 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
3730 followRedirect=True)
3731 d.addCallback(lambda ignored:
3732 self.GET("/operations/128?t=status&output=JSON"))
3734 data = simplejson.loads(res)
3735 self.failUnless("finished" in data, res)
3736 monitor = self.ws.root.child_operations.handles["128"][0]
3737 d = self.POST("/operations/128?t=cancel&output=JSON")
3739 data = simplejson.loads(res)
3740 self.failUnless("finished" in data, res)
3741 # t=cancel causes the handle to be forgotten
3742 self.failUnless(monitor.is_cancelled())
3743 d.addCallback(_check2)
3745 d.addCallback(_check1)
3746 d.addCallback(lambda ignored:
3747 self.shouldHTTPError("ophandle_cancel",
3748 404, "404 Not Found",
3749 "unknown/expired handle '128'",
3751 "/operations/128?t=status&output=JSON"))
3754 def test_ophandle_retainfor(self):
3755 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
3756 followRedirect=True)
3757 d.addCallback(lambda ignored:
3758 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
3760 data = simplejson.loads(res)
3761 self.failUnless("finished" in data, res)
3762 d.addCallback(_check1)
3763 # the retain-for=0 will cause the handle to be expired very soon
3764 d.addCallback(lambda ign:
3765 self.clock.advance(2.0))
3766 d.addCallback(lambda ignored:
3767 self.shouldHTTPError("ophandle_retainfor",
3768 404, "404 Not Found",
3769 "unknown/expired handle '129'",
3771 "/operations/129?t=status&output=JSON"))
3774 def test_ophandle_release_after_complete(self):
3775 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
3776 followRedirect=True)
3777 d.addCallback(self.wait_for_operation, "130")
3778 d.addCallback(lambda ignored:
3779 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
3780 # the release-after-complete=true will cause the handle to be expired
3781 d.addCallback(lambda ignored:
3782 self.shouldHTTPError("ophandle_release_after_complete",
3783 404, "404 Not Found",
3784 "unknown/expired handle '130'",
3786 "/operations/130?t=status&output=JSON"))
3789 def test_uncollected_ophandle_expiration(self):
3790 # uncollected ophandles should expire after 4 days
3791 def _make_uncollected_ophandle(ophandle):
3792 d = self.POST(self.public_url +
3793 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3794 followRedirect=False)
3795 # When we start the operation, the webapi server will want
3796 # to redirect us to the page for the ophandle, so we get
3797 # confirmation that the operation has started. If the
3798 # manifest operation has finished by the time we get there,
3799 # following that redirect (by setting followRedirect=True
3800 # above) has the side effect of collecting the ophandle that
3801 # we've just created, which means that we can't use the
3802 # ophandle to test the uncollected timeout anymore. So,
3803 # instead, catch the 302 here and don't follow it.
3804 d.addBoth(self.should302, "uncollected_ophandle_creation")
3806 # Create an ophandle, don't collect it, then advance the clock by
3807 # 4 days - 1 second and make sure that the ophandle is still there.
3808 d = _make_uncollected_ophandle(131)
3809 d.addCallback(lambda ign:
3810 self.clock.advance((96*60*60) - 1)) # 96 hours = 4 days
3811 d.addCallback(lambda ign:
3812 self.GET("/operations/131?t=status&output=JSON"))
3814 data = simplejson.loads(res)
3815 self.failUnless("finished" in data, res)
3816 d.addCallback(_check1)
3817 # Create an ophandle, don't collect it, then try to collect it
3818 # after 4 days. It should be gone.
3819 d.addCallback(lambda ign:
3820 _make_uncollected_ophandle(132))
3821 d.addCallback(lambda ign:
3822 self.clock.advance(96*60*60))
3823 d.addCallback(lambda ign:
3824 self.shouldHTTPError("uncollected_ophandle_expired_after_100_hours",
3825 404, "404 Not Found",
3826 "unknown/expired handle '132'",
3828 "/operations/132?t=status&output=JSON"))
3831 def test_collected_ophandle_expiration(self):
3832 # collected ophandles should expire after 1 day
3833 def _make_collected_ophandle(ophandle):
3834 d = self.POST(self.public_url +
3835 "/foo/?t=start-manifest&ophandle=%d" % ophandle,
3836 followRedirect=True)
3837 # By following the initial redirect, we collect the ophandle
3838 # we've just created.
3840 # Create a collected ophandle, then collect it after 23 hours
3841 # and 59 seconds to make sure that it is still there.
3842 d = _make_collected_ophandle(133)
3843 d.addCallback(lambda ign:
3844 self.clock.advance((24*60*60) - 1))
3845 d.addCallback(lambda ign:
3846 self.GET("/operations/133?t=status&output=JSON"))
3848 data = simplejson.loads(res)
3849 self.failUnless("finished" in data, res)
3850 d.addCallback(_check1)
3851 # Create another uncollected ophandle, then try to collect it
3852 # after 24 hours to make sure that it is gone.
3853 d.addCallback(lambda ign:
3854 _make_collected_ophandle(134))
3855 d.addCallback(lambda ign:
3856 self.clock.advance(24*60*60))
3857 d.addCallback(lambda ign:
3858 self.shouldHTTPError("collected_ophandle_expired_after_1_day",
3859 404, "404 Not Found",
3860 "unknown/expired handle '134'",
3862 "/operations/134?t=status&output=JSON"))
def test_incident(self):
    # POSTing to /report_incident should file an incident report and
    # return an acknowledgement page.
    d = self.POST("/report_incident", details="eek")
    def _done(res):
        self.failUnless("Thank you for your report!" in res, res)
    d.addCallback(_done)
    return d
def test_static(self):
    # Serve a file from the configured /static directory and verify that
    # its contents round-trip through the webapi.
    webdir = os.path.join(self.staticdir, "subdir")
    fileutil.make_dirs(webdir)
    # Close the handle even if the write raises, so we don't leak an open
    # file descriptor across tests (the original open/write/close sequence
    # had no try/finally).
    f = open(os.path.join(webdir, "hello.txt"), "wb")
    try:
        f.write("hello")
    finally:
        f.close()

    d = self.GET("/static/subdir/hello.txt")
    def _check(res):
        self.failUnlessReallyEqual(res, "hello")
    d.addCallback(_check)
    return d
class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
    """Unit tests for the small formatting/parsing helpers used by the
    web frontend (allmydata.web.common and allmydata.web.status)."""

    def test_load_file(self):
        # getxmlfile(...).load() raises unless a well-formed XML template
        # exists under that name.
        common.getxmlfile('directory.xhtml').load()

    def test_parse_replace_arg(self):
        self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
        self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
        self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
                                   "only-files")
        # anything else (e.g. a typo) is rejected
        self.shouldFail(AssertionError, "test_parse_replace_arg", "",
                        common.parse_replace_arg, "only_fles")

    def test_abbreviate_time(self):
        self.failUnlessReallyEqual(common.abbreviate_time(None), "")
        self.failUnlessReallyEqual(common.abbreviate_time(1.234), "1.23s")
        self.failUnlessReallyEqual(common.abbreviate_time(0.123), "123ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
        self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
        # negative durations are not clamped; they render in microseconds
        self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")

    def test_compute_rate(self):
        # compute_rate(bytes, seconds) -> bytes/sec, or None when either
        # input is missing or no time has elapsed
        self.failUnlessReallyEqual(common.compute_rate(None, None), None)
        self.failUnlessReallyEqual(common.compute_rate(None, 1), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, None), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, 0), None)
        self.failUnlessReallyEqual(common.compute_rate(250000, 10), 25000.0)
        self.failUnlessReallyEqual(common.compute_rate(0, 10), 0.0)
        # negative sizes or elapsed times are programming errors
        self.shouldFail(AssertionError, "test_compute_rate", "",
                        common.compute_rate, -100, 10)
        self.shouldFail(AssertionError, "test_compute_rate", "",
                        common.compute_rate, 100, -10)

        # sanity-check the composition with abbreviate_rate
        rate = common.compute_rate(10*1000*1000, 1)
        self.failUnlessReallyEqual(common.abbreviate_rate(rate), "10.00MBps")

    def test_abbreviate_rate(self):
        self.failUnlessReallyEqual(common.abbreviate_rate(None), "")
        self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
        self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")

    def test_abbreviate_size(self):
        self.failUnlessReallyEqual(common.abbreviate_size(None), "")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
        self.failUnlessReallyEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
        self.failUnlessReallyEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessReallyEqual(common.abbreviate_size(123), "123B")

    def test_plural(self):
        def convert(s):
            return "%d second%s" % (s, status.plural(s))
        self.failUnlessReallyEqual(convert(0), "0 seconds")
        self.failUnlessReallyEqual(convert(1), "1 second")
        self.failUnlessReallyEqual(convert(2), "2 seconds")
        def convert2(s):
            return "has share%s: %s" % (status.plural(s), ",".join(s))
        self.failUnlessReallyEqual(convert2([]), "has shares: ")
        self.failUnlessReallyEqual(convert2(["1"]), "has share: 1")
        self.failUnlessReallyEqual(convert2(["1","2"]), "has shares: 1,2")
3949 class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
def CHECK(self, ign, which, args, clientnum=0):
    # Helper for chaining into a Deferred: POST to the stashed file URL
    # for `which`, with `args` appended as the query string. `ign` is the
    # ignored result of the previous callback.
    return self.GET("%s?%s" % (self.fileurls[which], args),
                    method="POST", clientnum=clientnum)
3956 def test_filecheck(self):
3957 self.basedir = "web/Grid/filecheck"
3959 c0 = self.g.clients[0]
3962 d = c0.upload(upload.Data(DATA, convergence=""))
3963 def _stash_uri(ur, which):
3964 self.uris[which] = ur.uri
3965 d.addCallback(_stash_uri, "good")
3966 d.addCallback(lambda ign:
3967 c0.upload(upload.Data(DATA+"1", convergence="")))
3968 d.addCallback(_stash_uri, "sick")
3969 d.addCallback(lambda ign:
3970 c0.upload(upload.Data(DATA+"2", convergence="")))
3971 d.addCallback(_stash_uri, "dead")
3972 def _stash_mutable_uri(n, which):
3973 self.uris[which] = n.get_uri()
3974 assert isinstance(self.uris[which], str)
3975 d.addCallback(lambda ign:
3976 c0.create_mutable_file(publish.MutableData(DATA+"3")))
3977 d.addCallback(_stash_mutable_uri, "corrupt")
3978 d.addCallback(lambda ign:
3979 c0.upload(upload.Data("literal", convergence="")))
3980 d.addCallback(_stash_uri, "small")
3981 d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
3982 d.addCallback(_stash_mutable_uri, "smalldir")
3984 def _compute_fileurls(ignored):
3986 for which in self.uris:
3987 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3988 d.addCallback(_compute_fileurls)
3990 def _clobber_shares(ignored):
3991 good_shares = self.find_uri_shares(self.uris["good"])
3992 self.failUnlessReallyEqual(len(good_shares), 10)
3993 sick_shares = self.find_uri_shares(self.uris["sick"])
3994 os.unlink(sick_shares[0][2])
3995 dead_shares = self.find_uri_shares(self.uris["dead"])
3996 for i in range(1, 10):
3997 os.unlink(dead_shares[i][2])
3998 c_shares = self.find_uri_shares(self.uris["corrupt"])
3999 cso = CorruptShareOptions()
4000 cso.stdout = StringIO()
4001 cso.parseOptions([c_shares[0][2]])
4003 d.addCallback(_clobber_shares)
4005 d.addCallback(self.CHECK, "good", "t=check")
4006 def _got_html_good(res):
4007 self.failUnlessIn("Healthy", res)
4008 self.failIfIn("Not Healthy", res)
4009 d.addCallback(_got_html_good)
4010 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
4011 def _got_html_good_return_to(res):
4012 self.failUnlessIn("Healthy", res)
4013 self.failIfIn("Not Healthy", res)
4014 self.failUnlessIn('<a href="somewhere">Return to file', res)
4015 d.addCallback(_got_html_good_return_to)
4016 d.addCallback(self.CHECK, "good", "t=check&output=json")
4017 def _got_json_good(res):
4018 r = simplejson.loads(res)
4019 self.failUnlessEqual(r["summary"], "Healthy")
4020 self.failUnless(r["results"]["healthy"])
4021 self.failIf(r["results"]["needs-rebalancing"])
4022 self.failUnless(r["results"]["recoverable"])
4023 d.addCallback(_got_json_good)
4025 d.addCallback(self.CHECK, "small", "t=check")
4026 def _got_html_small(res):
4027 self.failUnlessIn("Literal files are always healthy", res)
4028 self.failIfIn("Not Healthy", res)
4029 d.addCallback(_got_html_small)
4030 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
4031 def _got_html_small_return_to(res):
4032 self.failUnlessIn("Literal files are always healthy", res)
4033 self.failIfIn("Not Healthy", res)
4034 self.failUnlessIn('<a href="somewhere">Return to file', res)
4035 d.addCallback(_got_html_small_return_to)
4036 d.addCallback(self.CHECK, "small", "t=check&output=json")
4037 def _got_json_small(res):
4038 r = simplejson.loads(res)
4039 self.failUnlessEqual(r["storage-index"], "")
4040 self.failUnless(r["results"]["healthy"])
4041 d.addCallback(_got_json_small)
4043 d.addCallback(self.CHECK, "smalldir", "t=check")
4044 def _got_html_smalldir(res):
4045 self.failUnlessIn("Literal files are always healthy", res)
4046 self.failIfIn("Not Healthy", res)
4047 d.addCallback(_got_html_smalldir)
4048 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
4049 def _got_json_smalldir(res):
4050 r = simplejson.loads(res)
4051 self.failUnlessEqual(r["storage-index"], "")
4052 self.failUnless(r["results"]["healthy"])
4053 d.addCallback(_got_json_smalldir)
4055 d.addCallback(self.CHECK, "sick", "t=check")
4056 def _got_html_sick(res):
4057 self.failUnlessIn("Not Healthy", res)
4058 d.addCallback(_got_html_sick)
4059 d.addCallback(self.CHECK, "sick", "t=check&output=json")
4060 def _got_json_sick(res):
4061 r = simplejson.loads(res)
4062 self.failUnlessEqual(r["summary"],
4063 "Not Healthy: 9 shares (enc 3-of-10)")
4064 self.failIf(r["results"]["healthy"])
4065 self.failIf(r["results"]["needs-rebalancing"])
4066 self.failUnless(r["results"]["recoverable"])
4067 d.addCallback(_got_json_sick)
4069 d.addCallback(self.CHECK, "dead", "t=check")
4070 def _got_html_dead(res):
4071 self.failUnlessIn("Not Healthy", res)
4072 d.addCallback(_got_html_dead)
4073 d.addCallback(self.CHECK, "dead", "t=check&output=json")
4074 def _got_json_dead(res):
4075 r = simplejson.loads(res)
4076 self.failUnlessEqual(r["summary"],
4077 "Not Healthy: 1 shares (enc 3-of-10)")
4078 self.failIf(r["results"]["healthy"])
4079 self.failIf(r["results"]["needs-rebalancing"])
4080 self.failIf(r["results"]["recoverable"])
4081 d.addCallback(_got_json_dead)
4083 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
4084 def _got_html_corrupt(res):
4085 self.failUnlessIn("Not Healthy! : Unhealthy", res)
4086 d.addCallback(_got_html_corrupt)
4087 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
4088 def _got_json_corrupt(res):
4089 r = simplejson.loads(res)
4090 self.failUnlessIn("Unhealthy: 9 shares (enc 3-of-10)", r["summary"])
4091 self.failIf(r["results"]["healthy"])
4092 self.failUnless(r["results"]["recoverable"])
4093 self.failUnlessReallyEqual(r["results"]["count-shares-good"], 9)
4094 self.failUnlessReallyEqual(r["results"]["count-corrupt-shares"], 1)
4095 d.addCallback(_got_json_corrupt)
4097 d.addErrback(self.explain_web_error)
4100 def test_repair_html(self):
4101 self.basedir = "web/Grid/repair_html"
4103 c0 = self.g.clients[0]
4106 d = c0.upload(upload.Data(DATA, convergence=""))
4107 def _stash_uri(ur, which):
4108 self.uris[which] = ur.uri
4109 d.addCallback(_stash_uri, "good")
4110 d.addCallback(lambda ign:
4111 c0.upload(upload.Data(DATA+"1", convergence="")))
4112 d.addCallback(_stash_uri, "sick")
4113 d.addCallback(lambda ign:
4114 c0.upload(upload.Data(DATA+"2", convergence="")))
4115 d.addCallback(_stash_uri, "dead")
4116 def _stash_mutable_uri(n, which):
4117 self.uris[which] = n.get_uri()
4118 assert isinstance(self.uris[which], str)
4119 d.addCallback(lambda ign:
4120 c0.create_mutable_file(publish.MutableData(DATA+"3")))
4121 d.addCallback(_stash_mutable_uri, "corrupt")
4123 def _compute_fileurls(ignored):
4125 for which in self.uris:
4126 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4127 d.addCallback(_compute_fileurls)
4129 def _clobber_shares(ignored):
4130 good_shares = self.find_uri_shares(self.uris["good"])
4131 self.failUnlessReallyEqual(len(good_shares), 10)
4132 sick_shares = self.find_uri_shares(self.uris["sick"])
4133 os.unlink(sick_shares[0][2])
4134 dead_shares = self.find_uri_shares(self.uris["dead"])
4135 for i in range(1, 10):
4136 os.unlink(dead_shares[i][2])
4137 c_shares = self.find_uri_shares(self.uris["corrupt"])
4138 cso = CorruptShareOptions()
4139 cso.stdout = StringIO()
4140 cso.parseOptions([c_shares[0][2]])
4142 d.addCallback(_clobber_shares)
4144 d.addCallback(self.CHECK, "good", "t=check&repair=true")
4145 def _got_html_good(res):
4146 self.failUnlessIn("Healthy", res)
4147 self.failIfIn("Not Healthy", res)
4148 self.failUnlessIn("No repair necessary", res)
4149 d.addCallback(_got_html_good)
4151 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
4152 def _got_html_sick(res):
4153 self.failUnlessIn("Healthy : healthy", res)
4154 self.failIfIn("Not Healthy", res)
4155 self.failUnlessIn("Repair successful", res)
4156 d.addCallback(_got_html_sick)
4158 # repair of a dead file will fail, of course, but it isn't yet
4159 # clear how this should be reported. Right now it shows up as
4162 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
4163 #def _got_html_dead(res):
4165 # self.failUnlessIn("Healthy : healthy", res)
4166 # self.failIfIn("Not Healthy", res)
4167 # self.failUnlessIn("No repair necessary", res)
4168 #d.addCallback(_got_html_dead)
4170 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
4171 def _got_html_corrupt(res):
4172 self.failUnlessIn("Healthy : Healthy", res)
4173 self.failIfIn("Not Healthy", res)
4174 self.failUnlessIn("Repair successful", res)
4175 d.addCallback(_got_html_corrupt)
4177 d.addErrback(self.explain_web_error)
4180 def test_repair_json(self):
4181 self.basedir = "web/Grid/repair_json"
4183 c0 = self.g.clients[0]
4186 d = c0.upload(upload.Data(DATA+"1", convergence=""))
4187 def _stash_uri(ur, which):
4188 self.uris[which] = ur.uri
4189 d.addCallback(_stash_uri, "sick")
4191 def _compute_fileurls(ignored):
4193 for which in self.uris:
4194 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4195 d.addCallback(_compute_fileurls)
4197 def _clobber_shares(ignored):
4198 sick_shares = self.find_uri_shares(self.uris["sick"])
4199 os.unlink(sick_shares[0][2])
4200 d.addCallback(_clobber_shares)
4202 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
4203 def _got_json_sick(res):
4204 r = simplejson.loads(res)
4205 self.failUnlessReallyEqual(r["repair-attempted"], True)
4206 self.failUnlessReallyEqual(r["repair-successful"], True)
4207 self.failUnlessEqual(r["pre-repair-results"]["summary"],
4208 "Not Healthy: 9 shares (enc 3-of-10)")
4209 self.failIf(r["pre-repair-results"]["results"]["healthy"])
4210 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
4211 self.failUnless(r["post-repair-results"]["results"]["healthy"])
4212 d.addCallback(_got_json_sick)
4214 d.addErrback(self.explain_web_error)
4217 def test_unknown(self, immutable=False):
4218 self.basedir = "web/Grid/unknown"
4220 self.basedir = "web/Grid/unknown-immutable"
4223 c0 = self.g.clients[0]
4227 # the future cap format may contain slashes, which must be tolerated
4228 expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
4232 name = u"future-imm"
4233 future_node = UnknownNode(None, unknown_immcap, deep_immutable=True)
4234 d = c0.create_immutable_dirnode({name: (future_node, {})})
4237 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
4238 d = c0.create_dirnode()
4240 def _stash_root_and_create_file(n):
4242 self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
4243 self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
4245 return self.rootnode.set_node(name, future_node)
4246 d.addCallback(_stash_root_and_create_file)
4248 # make sure directory listing tolerates unknown nodes
4249 d.addCallback(lambda ign: self.GET(self.rooturl))
4250 def _check_directory_html(res, expected_type_suffix):
4251 pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
4252 '<td>%s</td>' % (expected_type_suffix, str(name)),
4254 self.failUnless(re.search(pattern, res), res)
4255 # find the More Info link for name, should be relative
4256 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
4257 info_url = mo.group(1)
4258 self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
4260 d.addCallback(_check_directory_html, "-IMM")
4262 d.addCallback(_check_directory_html, "")
4264 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
4265 def _check_directory_json(res, expect_rw_uri):
4266 data = simplejson.loads(res)
4267 self.failUnlessEqual(data[0], "dirnode")
4268 f = data[1]["children"][name]
4269 self.failUnlessEqual(f[0], "unknown")
4271 self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
4273 self.failIfIn("rw_uri", f[1])
4275 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
4277 self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
4278 self.failUnlessIn("metadata", f[1])
4279 d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
4281 def _check_info(res, expect_rw_uri, expect_ro_uri):
4282 self.failUnlessIn("Object Type: <span>unknown</span>", res)
4284 self.failUnlessIn(unknown_rwcap, res)
4287 self.failUnlessIn(unknown_immcap, res)
4289 self.failUnlessIn(unknown_rocap, res)
4291 self.failIfIn(unknown_rocap, res)
4292 self.failIfIn("Raw data as", res)
4293 self.failIfIn("Directory writecap", res)
4294 self.failIfIn("Checker Operations", res)
4295 self.failIfIn("Mutable File Operations", res)
4296 self.failIfIn("Directory Operations", res)
4298 # FIXME: these should have expect_rw_uri=not immutable; I don't know
4299 # why they fail. Possibly related to ticket #922.
4301 d.addCallback(lambda ign: self.GET(expected_info_url))
4302 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
4303 d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
4304 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
4306 def _check_json(res, expect_rw_uri):
4307 data = simplejson.loads(res)
4308 self.failUnlessEqual(data[0], "unknown")
4310 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
4312 self.failIfIn("rw_uri", data[1])
4315 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
4316 self.failUnlessReallyEqual(data[1]["mutable"], False)
4318 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
4319 self.failUnlessReallyEqual(data[1]["mutable"], True)
4321 self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
4322 self.failIfIn("mutable", data[1])
4324 # TODO: check metadata contents
4325 self.failUnlessIn("metadata", data[1])
4327 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
4328 d.addCallback(_check_json, expect_rw_uri=not immutable)
4330 # and make sure that a read-only version of the directory can be
4331 # rendered too. This version will not have unknown_rwcap, whether
4332 # or not future_node was immutable.
4333 d.addCallback(lambda ign: self.GET(self.rourl))
4335 d.addCallback(_check_directory_html, "-IMM")
4337 d.addCallback(_check_directory_html, "-RO")
4339 d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
4340 d.addCallback(_check_directory_json, expect_rw_uri=False)
4342 d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
4343 d.addCallback(_check_json, expect_rw_uri=False)
4345 # TODO: check that getting t=info from the Info link in the ro directory
4346 # works, and does not include the writecap URI.
4349 def test_immutable_unknown(self):
4350 return self.test_unknown(immutable=True)
4352 def test_mutant_dirnodes_are_omitted(self):
4353 self.basedir = "web/Grid/mutant_dirnodes_are_omitted"
4356 c = self.g.clients[0]
4361 lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
4362 mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
4363 mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
4365 # This method tests mainly dirnode, but we'd have to duplicate code in order to
4366 # test the dirnode and web layers separately.
4368 # 'lonely' is a valid LIT child, 'ro' is a mutant child with an SSK-RO readcap,
4369 # and 'write-in-ro' is a mutant child with an SSK writecap in the ro_uri field.
4370 # When the directory is read, the mutants should be silently disposed of, leaving
4371 # their lonely sibling.
4372 # We don't test the case of a retrieving a cap from the encrypted rw_uri field,
4373 # because immutable directories don't have a writecap and therefore that field
4374 # isn't (and can't be) decrypted.
4375 # TODO: The field still exists in the netstring. Technically we should check what
4376 # happens if something is put there (_unpack_contents should raise ValueError),
4377 # but that can wait.
4379 lonely_child = nm.create_from_cap(lonely_uri)
4380 mutant_ro_child = nm.create_from_cap(mut_read_uri)
4381 mutant_write_in_ro_child = nm.create_from_cap(mut_write_uri)
4383 def _by_hook_or_by_crook():
4385 for n in [mutant_ro_child, mutant_write_in_ro_child]:
4386 n.is_allowed_in_immutable_directory = _by_hook_or_by_crook
4388 mutant_write_in_ro_child.get_write_uri = lambda: None
4389 mutant_write_in_ro_child.get_readonly_uri = lambda: mut_write_uri
4391 kids = {u"lonely": (lonely_child, {}),
4392 u"ro": (mutant_ro_child, {}),
4393 u"write-in-ro": (mutant_write_in_ro_child, {}),
4395 d = c.create_immutable_dirnode(kids)
4398 self.failUnless(isinstance(dn, dirnode.DirectoryNode))
4399 self.failIf(dn.is_mutable())
4400 self.failUnless(dn.is_readonly())
4401 # This checks that if we somehow ended up calling dn._decrypt_rwcapdata, it would fail.
4402 self.failIf(hasattr(dn._node, 'get_writekey'))
4404 self.failUnlessIn("RO-IMM", rep)
4406 self.failUnlessIn("CHK", cap.to_string())
4409 self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
4410 return download_to_data(dn._node)
4411 d.addCallback(_created)
4413 def _check_data(data):
4414 # Decode the netstring representation of the directory to check that all children
4415 # are present. This is a bit of an abstraction violation, but there's not really
4416 # any other way to do it given that the real DirectoryNode._unpack_contents would
4417 # strip the mutant children out (which is what we're trying to test, later).
4420 while position < len(data):
4421 entries, position = split_netstring(data, 1, position)
4423 (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
4424 name = name_utf8.decode("utf-8")
4425 self.failUnlessEqual(rwcapdata, "")
4426 self.failUnlessIn(name, kids)
4427 (expected_child, ign) = kids[name]
4428 self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
4431 self.failUnlessReallyEqual(numkids, 3)
4432 return self.rootnode.list()
4433 d.addCallback(_check_data)
4435 # Now when we use the real directory listing code, the mutants should be absent.
4436 def _check_kids(children):
4437 self.failUnlessReallyEqual(sorted(children.keys()), [u"lonely"])
4438 lonely_node, lonely_metadata = children[u"lonely"]
4440 self.failUnlessReallyEqual(lonely_node.get_write_uri(), None)
4441 self.failUnlessReallyEqual(lonely_node.get_readonly_uri(), lonely_uri)
4442 d.addCallback(_check_kids)
4444 d.addCallback(lambda ign: nm.create_from_cap(self.cap.to_string()))
4445 d.addCallback(lambda n: n.list())
4446 d.addCallback(_check_kids) # again with dirnode recreated from cap
4448 # Make sure the lonely child can be listed in HTML...
4449 d.addCallback(lambda ign: self.GET(self.rooturl))
4450 def _check_html(res):
4451 self.failIfIn("URI:SSK", res)
4452 get_lonely = "".join([r'<td>FILE</td>',
4454 r'<a href="[^"]+%s[^"]+">lonely</a>' % (urllib.quote(lonely_uri),),
4456 r'\s+<td align="right">%d</td>' % len("one"),
4458 self.failUnless(re.search(get_lonely, res), res)
4460 # find the More Info link for name, should be relative
4461 mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
4462 info_url = mo.group(1)
4463 self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
4464 d.addCallback(_check_html)
4467 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
4468 def _check_json(res):
4469 data = simplejson.loads(res)
4470 self.failUnlessEqual(data[0], "dirnode")
4471 listed_children = data[1]["children"]
4472 self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
4473 ll_type, ll_data = listed_children[u"lonely"]
4474 self.failUnlessEqual(ll_type, "filenode")
4475 self.failIfIn("rw_uri", ll_data)
4476 self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
4477 d.addCallback(_check_json)
4480 def test_deep_check(self):
4481 self.basedir = "web/Grid/deep_check"
4483 c0 = self.g.clients[0]
4487 d = c0.create_dirnode()
4488 def _stash_root_and_create_file(n):
4490 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4491 return n.add_file(u"good", upload.Data(DATA, convergence=""))
4492 d.addCallback(_stash_root_and_create_file)
4493 def _stash_uri(fn, which):
4494 self.uris[which] = fn.get_uri()
4496 d.addCallback(_stash_uri, "good")
4497 d.addCallback(lambda ign:
4498 self.rootnode.add_file(u"small",
4499 upload.Data("literal",
4501 d.addCallback(_stash_uri, "small")
4502 d.addCallback(lambda ign:
4503 self.rootnode.add_file(u"sick",
4504 upload.Data(DATA+"1",
4506 d.addCallback(_stash_uri, "sick")
4508 # this tests that deep-check and stream-manifest will ignore
4509 # UnknownNode instances. Hopefully this will also cover deep-stats.
4510 future_node = UnknownNode(unknown_rwcap, unknown_rocap)
4511 d.addCallback(lambda ign: self.rootnode.set_node(u"future", future_node))
4513 def _clobber_shares(ignored):
4514 self.delete_shares_numbered(self.uris["sick"], [0,1])
4515 d.addCallback(_clobber_shares)
4523 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
4526 units = [simplejson.loads(line)
4527 for line in res.splitlines()
4530 print "response is:", res
4531 print "undecodeable line was '%s'" % line
4533 self.failUnlessReallyEqual(len(units), 5+1)
4534 # should be parent-first
4536 self.failUnlessEqual(u0["path"], [])
4537 self.failUnlessEqual(u0["type"], "directory")
4538 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
4539 u0cr = u0["check-results"]
4540 self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)
4542 ugood = [u for u in units
4543 if u["type"] == "file" and u["path"] == [u"good"]][0]
4544 self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
4545 ugoodcr = ugood["check-results"]
4546 self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
4549 self.failUnlessEqual(stats["type"], "stats")
4551 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
4552 self.failUnlessReallyEqual(s["count-literal-files"], 1)
4553 self.failUnlessReallyEqual(s["count-directories"], 1)
4554 self.failUnlessReallyEqual(s["count-unknown"], 1)
4555 d.addCallback(_done)
4557 d.addCallback(self.CHECK, "root", "t=stream-manifest")
4558 def _check_manifest(res):
4559 self.failUnless(res.endswith("\n"))
4560 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
4561 self.failUnlessReallyEqual(len(units), 5+1)
4562 self.failUnlessEqual(units[-1]["type"], "stats")
4564 self.failUnlessEqual(first["path"], [])
4565 self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
4566 self.failUnlessEqual(first["type"], "directory")
4567 stats = units[-1]["stats"]
4568 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
4569 self.failUnlessReallyEqual(stats["count-literal-files"], 1)
4570 self.failUnlessReallyEqual(stats["count-mutable-files"], 0)
4571 self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
4572 self.failUnlessReallyEqual(stats["count-unknown"], 1)
4573 d.addCallback(_check_manifest)
4575 # now add root/subdir and root/subdir/grandchild, then make subdir
4576 # unrecoverable, then see what happens
4578 d.addCallback(lambda ign:
4579 self.rootnode.create_subdirectory(u"subdir"))
4580 d.addCallback(_stash_uri, "subdir")
4581 d.addCallback(lambda subdir_node:
4582 subdir_node.add_file(u"grandchild",
4583 upload.Data(DATA+"2",
4585 d.addCallback(_stash_uri, "grandchild")
4587 d.addCallback(lambda ign:
4588 self.delete_shares_numbered(self.uris["subdir"],
4596 # root/subdir [unrecoverable]
4597 # root/subdir/grandchild
4599 # how should a streaming-JSON API indicate fatal error?
4600 # answer: emit ERROR: instead of a JSON string
4602 d.addCallback(self.CHECK, "root", "t=stream-manifest")
4603 def _check_broken_manifest(res):
4604 lines = res.splitlines()
4606 for (i,line) in enumerate(lines)
4607 if line.startswith("ERROR:")]
4609 self.fail("no ERROR: in output: %s" % (res,))
4610 first_error = error_lines[0]
4611 error_line = lines[first_error]
4612 error_msg = lines[first_error+1:]
4613 error_msg_s = "\n".join(error_msg) + "\n"
4614 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
4616 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
4617 units = [simplejson.loads(line) for line in lines[:first_error]]
4618 self.failUnlessReallyEqual(len(units), 6) # includes subdir
4619 last_unit = units[-1]
4620 self.failUnlessEqual(last_unit["path"], ["subdir"])
4621 d.addCallback(_check_broken_manifest)
4623 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
4624 def _check_broken_deepcheck(res):
4625 lines = res.splitlines()
4627 for (i,line) in enumerate(lines)
4628 if line.startswith("ERROR:")]
4630 self.fail("no ERROR: in output: %s" % (res,))
4631 first_error = error_lines[0]
4632 error_line = lines[first_error]
4633 error_msg = lines[first_error+1:]
4634 error_msg_s = "\n".join(error_msg) + "\n"
4635 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
4637 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
4638 units = [simplejson.loads(line) for line in lines[:first_error]]
4639 self.failUnlessReallyEqual(len(units), 6) # includes subdir
4640 last_unit = units[-1]
4641 self.failUnlessEqual(last_unit["path"], ["subdir"])
4642 r = last_unit["check-results"]["results"]
4643 self.failUnlessReallyEqual(r["count-recoverable-versions"], 0)
4644 self.failUnlessReallyEqual(r["count-shares-good"], 1)
4645 self.failUnlessReallyEqual(r["recoverable"], False)
4646 d.addCallback(_check_broken_deepcheck)
4648 d.addErrback(self.explain_web_error)
4651 def test_deep_check_and_repair(self):
4652 self.basedir = "web/Grid/deep_check_and_repair"
4654 c0 = self.g.clients[0]
4658 d = c0.create_dirnode()
4659 def _stash_root_and_create_file(n):
4661 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4662 return n.add_file(u"good", upload.Data(DATA, convergence=""))
4663 d.addCallback(_stash_root_and_create_file)
4664 def _stash_uri(fn, which):
4665 self.uris[which] = fn.get_uri()
4666 d.addCallback(_stash_uri, "good")
4667 d.addCallback(lambda ign:
4668 self.rootnode.add_file(u"small",
4669 upload.Data("literal",
4671 d.addCallback(_stash_uri, "small")
4672 d.addCallback(lambda ign:
4673 self.rootnode.add_file(u"sick",
4674 upload.Data(DATA+"1",
4676 d.addCallback(_stash_uri, "sick")
4677 #d.addCallback(lambda ign:
4678 # self.rootnode.add_file(u"dead",
4679 # upload.Data(DATA+"2",
4681 #d.addCallback(_stash_uri, "dead")
4683 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
4684 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
4685 #d.addCallback(_stash_uri, "corrupt")
4687 def _clobber_shares(ignored):
4688 good_shares = self.find_uri_shares(self.uris["good"])
4689 self.failUnlessReallyEqual(len(good_shares), 10)
4690 sick_shares = self.find_uri_shares(self.uris["sick"])
4691 os.unlink(sick_shares[0][2])
4692 #dead_shares = self.find_uri_shares(self.uris["dead"])
4693 #for i in range(1, 10):
4694 # os.unlink(dead_shares[i][2])
4696 #c_shares = self.find_uri_shares(self.uris["corrupt"])
4697 #cso = CorruptShareOptions()
4698 #cso.stdout = StringIO()
4699 #cso.parseOptions([c_shares[0][2]])
4701 d.addCallback(_clobber_shares)
4704 # root/good CHK, 10 shares
4706 # root/sick CHK, 9 shares
4708 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
4710 units = [simplejson.loads(line)
4711 for line in res.splitlines()
4713 self.failUnlessReallyEqual(len(units), 4+1)
4714 # should be parent-first
4716 self.failUnlessEqual(u0["path"], [])
4717 self.failUnlessEqual(u0["type"], "directory")
4718 self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
4719 u0crr = u0["check-and-repair-results"]
4720 self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
4721 self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
4723 ugood = [u for u in units
4724 if u["type"] == "file" and u["path"] == [u"good"]][0]
4725 self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
4726 ugoodcrr = ugood["check-and-repair-results"]
4727 self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
4728 self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-shares-good"], 10)
4730 usick = [u for u in units
4731 if u["type"] == "file" and u["path"] == [u"sick"]][0]
4732 self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
4733 usickcrr = usick["check-and-repair-results"]
4734 self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
4735 self.failUnlessReallyEqual(usickcrr["repair-successful"], True)
4736 self.failUnlessReallyEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
4737 self.failUnlessReallyEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
4740 self.failUnlessEqual(stats["type"], "stats")
4742 self.failUnlessReallyEqual(s["count-immutable-files"], 2)
4743 self.failUnlessReallyEqual(s["count-literal-files"], 1)
4744 self.failUnlessReallyEqual(s["count-directories"], 1)
4745 d.addCallback(_done)
4747 d.addErrback(self.explain_web_error)
4750 def _count_leases(self, ignored, which):
4751 u = self.uris[which]
4752 shares = self.find_uri_shares(u)
4754 for shnum, serverid, fn in shares:
4755 sf = get_share_file(fn)
4756 num_leases = len(list(sf.get_leases()))
4757 lease_counts.append( (fn, num_leases) )
4760 def _assert_leasecount(self, lease_counts, expected):
4761 for (fn, num_leases) in lease_counts:
4762 if num_leases != expected:
4763 self.fail("expected %d leases, have %d, on %s" %
4764 (expected, num_leases, fn))
4766 def test_add_lease(self):
4767 self.basedir = "web/Grid/add_lease"
4768 self.set_up_grid(num_clients=2)
4769 c0 = self.g.clients[0]
4772 d = c0.upload(upload.Data(DATA, convergence=""))
4773 def _stash_uri(ur, which):
4774 self.uris[which] = ur.uri
4775 d.addCallback(_stash_uri, "one")
4776 d.addCallback(lambda ign:
4777 c0.upload(upload.Data(DATA+"1", convergence="")))
4778 d.addCallback(_stash_uri, "two")
4779 def _stash_mutable_uri(n, which):
4780 self.uris[which] = n.get_uri()
4781 assert isinstance(self.uris[which], str)
4782 d.addCallback(lambda ign:
4783 c0.create_mutable_file(publish.MutableData(DATA+"2")))
4784 d.addCallback(_stash_mutable_uri, "mutable")
4786 def _compute_fileurls(ignored):
4788 for which in self.uris:
4789 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
4790 d.addCallback(_compute_fileurls)
4792 d.addCallback(self._count_leases, "one")
4793 d.addCallback(self._assert_leasecount, 1)
4794 d.addCallback(self._count_leases, "two")
4795 d.addCallback(self._assert_leasecount, 1)
4796 d.addCallback(self._count_leases, "mutable")
4797 d.addCallback(self._assert_leasecount, 1)
4799 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
4800 def _got_html_good(res):
4801 self.failUnlessIn("Healthy", res)
4802 self.failIfIn("Not Healthy", res)
4803 d.addCallback(_got_html_good)
4805 d.addCallback(self._count_leases, "one")
4806 d.addCallback(self._assert_leasecount, 1)
4807 d.addCallback(self._count_leases, "two")
4808 d.addCallback(self._assert_leasecount, 1)
4809 d.addCallback(self._count_leases, "mutable")
4810 d.addCallback(self._assert_leasecount, 1)
4812 # this CHECK uses the original client, which uses the same
4813 # lease-secrets, so it will just renew the original lease
4814 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
4815 d.addCallback(_got_html_good)
4817 d.addCallback(self._count_leases, "one")
4818 d.addCallback(self._assert_leasecount, 1)
4819 d.addCallback(self._count_leases, "two")
4820 d.addCallback(self._assert_leasecount, 1)
4821 d.addCallback(self._count_leases, "mutable")
4822 d.addCallback(self._assert_leasecount, 1)
4824 # this CHECK uses an alternate client, which adds a second lease
4825 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
4826 d.addCallback(_got_html_good)
4828 d.addCallback(self._count_leases, "one")
4829 d.addCallback(self._assert_leasecount, 2)
4830 d.addCallback(self._count_leases, "two")
4831 d.addCallback(self._assert_leasecount, 1)
4832 d.addCallback(self._count_leases, "mutable")
4833 d.addCallback(self._assert_leasecount, 1)
4835 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
4836 d.addCallback(_got_html_good)
4838 d.addCallback(self._count_leases, "one")
4839 d.addCallback(self._assert_leasecount, 2)
4840 d.addCallback(self._count_leases, "two")
4841 d.addCallback(self._assert_leasecount, 1)
4842 d.addCallback(self._count_leases, "mutable")
4843 d.addCallback(self._assert_leasecount, 1)
4845 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
4847 d.addCallback(_got_html_good)
4849 d.addCallback(self._count_leases, "one")
4850 d.addCallback(self._assert_leasecount, 2)
4851 d.addCallback(self._count_leases, "two")
4852 d.addCallback(self._assert_leasecount, 1)
4853 d.addCallback(self._count_leases, "mutable")
4854 d.addCallback(self._assert_leasecount, 2)
4856 d.addErrback(self.explain_web_error)
4859 def test_deep_add_lease(self):
4860 self.basedir = "web/Grid/deep_add_lease"
4861 self.set_up_grid(num_clients=2)
4862 c0 = self.g.clients[0]
4866 d = c0.create_dirnode()
4867 def _stash_root_and_create_file(n):
4869 self.uris["root"] = n.get_uri()
4870 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4871 return n.add_file(u"one", upload.Data(DATA, convergence=""))
4872 d.addCallback(_stash_root_and_create_file)
4873 def _stash_uri(fn, which):
4874 self.uris[which] = fn.get_uri()
4875 d.addCallback(_stash_uri, "one")
4876 d.addCallback(lambda ign:
4877 self.rootnode.add_file(u"small",
4878 upload.Data("literal",
4880 d.addCallback(_stash_uri, "small")
4882 d.addCallback(lambda ign:
4883 c0.create_mutable_file(publish.MutableData("mutable")))
4884 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
4885 d.addCallback(_stash_uri, "mutable")
4887 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
4889 units = [simplejson.loads(line)
4890 for line in res.splitlines()
4892 # root, one, small, mutable, stats
4893 self.failUnlessReallyEqual(len(units), 4+1)
4894 d.addCallback(_done)
4896 d.addCallback(self._count_leases, "root")
4897 d.addCallback(self._assert_leasecount, 1)
4898 d.addCallback(self._count_leases, "one")
4899 d.addCallback(self._assert_leasecount, 1)
4900 d.addCallback(self._count_leases, "mutable")
4901 d.addCallback(self._assert_leasecount, 1)
4903 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
4904 d.addCallback(_done)
4906 d.addCallback(self._count_leases, "root")
4907 d.addCallback(self._assert_leasecount, 1)
4908 d.addCallback(self._count_leases, "one")
4909 d.addCallback(self._assert_leasecount, 1)
4910 d.addCallback(self._count_leases, "mutable")
4911 d.addCallback(self._assert_leasecount, 1)
4913 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
4915 d.addCallback(_done)
4917 d.addCallback(self._count_leases, "root")
4918 d.addCallback(self._assert_leasecount, 2)
4919 d.addCallback(self._count_leases, "one")
4920 d.addCallback(self._assert_leasecount, 2)
4921 d.addCallback(self._count_leases, "mutable")
4922 d.addCallback(self._assert_leasecount, 2)
4924 d.addErrback(self.explain_web_error)
4928 def test_exceptions(self):
4929 self.basedir = "web/Grid/exceptions"
4930 self.set_up_grid(num_clients=1, num_servers=2)
4931 c0 = self.g.clients[0]
4932 c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 2
4935 d = c0.create_dirnode()
4937 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
4938 self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
4940 d.addCallback(_stash_root)
4941 d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
4943 self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
4944 self.delete_shares_numbered(ur.uri, range(1,10))
4946 u = uri.from_string(ur.uri)
4947 u.key = testutil.flip_bit(u.key, 0)
4948 baduri = u.to_string()
4949 self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
4950 d.addCallback(_stash_bad)
4951 d.addCallback(lambda ign: c0.create_dirnode())
4952 def _mangle_dirnode_1share(n):
4954 url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
4955 self.fileurls["dir-1share-json"] = url + "?t=json"
4956 self.delete_shares_numbered(u, range(1,10))
4957 d.addCallback(_mangle_dirnode_1share)
4958 d.addCallback(lambda ign: c0.create_dirnode())
4959 def _mangle_dirnode_0share(n):
4961 url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
4962 self.fileurls["dir-0share-json"] = url + "?t=json"
4963 self.delete_shares_numbered(u, range(0,10))
4964 d.addCallback(_mangle_dirnode_0share)
4966 # NotEnoughSharesError should be reported sensibly, with a
4967 # text/plain explanation of the problem, and perhaps some
4968 # information on which shares *could* be found.
4970 d.addCallback(lambda ignored:
4971 self.shouldHTTPError("GET unrecoverable",
4972 410, "Gone", "NoSharesError",
4973 self.GET, self.fileurls["0shares"]))
4974 def _check_zero_shares(body):
4975 self.failIfIn("<html>", body)
4976 body = " ".join(body.strip().split())
4977 exp = ("NoSharesError: no shares could be found. "
4978 "Zero shares usually indicates a corrupt URI, or that "
4979 "no servers were connected, but it might also indicate "
4980 "severe corruption. You should perform a filecheck on "
4981 "this object to learn more. The full error message is: "
4982 "no shares (need 3). Last failure: None")
4983 self.failUnlessReallyEqual(exp, body)
4984 d.addCallback(_check_zero_shares)
4987 d.addCallback(lambda ignored:
4988 self.shouldHTTPError("GET 1share",
4989 410, "Gone", "NotEnoughSharesError",
4990 self.GET, self.fileurls["1share"]))
4991 def _check_one_share(body):
4992 self.failIfIn("<html>", body)
4993 body = " ".join(body.strip().split())
4994 msgbase = ("NotEnoughSharesError: This indicates that some "
4995 "servers were unavailable, or that shares have been "
4996 "lost to server departure, hard drive failure, or disk "
4997 "corruption. You should perform a filecheck on "
4998 "this object to learn more. The full error message is:"
5000 msg1 = msgbase + (" ran out of shares:"
5003 " overdue= unused= need 3. Last failure: None")
5004 msg2 = msgbase + (" ran out of shares:"
5006 " pending=Share(sh0-on-xgru5)"
5007 " overdue= unused= need 3. Last failure: None")
5008 self.failUnless(body == msg1 or body == msg2, body)
5009 d.addCallback(_check_one_share)
5011 d.addCallback(lambda ignored:
5012 self.shouldHTTPError("GET imaginary",
5013 404, "Not Found", None,
5014 self.GET, self.fileurls["imaginary"]))
5015 def _missing_child(body):
5016 self.failUnlessIn("No such child: imaginary", body)
5017 d.addCallback(_missing_child)
5019 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
5020 def _check_0shares_dir_html(body):
5021 self.failUnlessIn("<html>", body)
5022 # we should see the regular page, but without the child table or
5024 body = " ".join(body.strip().split())
5025 self.failUnlessIn('href="?t=info">More info on this directory',
5027 exp = ("UnrecoverableFileError: the directory (or mutable file) "
5028 "could not be retrieved, because there were insufficient "
5029 "good shares. This might indicate that no servers were "
5030 "connected, insufficient servers were connected, the URI "
5031 "was corrupt, or that shares have been lost due to server "
5032 "departure, hard drive failure, or disk corruption. You "
5033 "should perform a filecheck on this object to learn more.")
5034 self.failUnlessIn(exp, body)
5035 self.failUnlessIn("No upload forms: directory is unreadable", body)
5036 d.addCallback(_check_0shares_dir_html)
5038 d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
5039 def _check_1shares_dir_html(body):
5040 # at some point, we'll split UnrecoverableFileError into 0-shares
5041 # and some-shares like we did for immutable files (since there
5042 # are different sorts of advice to offer in each case). For now,
5043 # they present the same way.
5044 self.failUnlessIn("<html>", body)
5045 body = " ".join(body.strip().split())
5046 self.failUnlessIn('href="?t=info">More info on this directory',
5048 exp = ("UnrecoverableFileError: the directory (or mutable file) "
5049 "could not be retrieved, because there were insufficient "
5050 "good shares. This might indicate that no servers were "
5051 "connected, insufficient servers were connected, the URI "
5052 "was corrupt, or that shares have been lost due to server "
5053 "departure, hard drive failure, or disk corruption. You "
5054 "should perform a filecheck on this object to learn more.")
5055 self.failUnlessIn(exp, body)
5056 self.failUnlessIn("No upload forms: directory is unreadable", body)
5057 d.addCallback(_check_1shares_dir_html)
5059 d.addCallback(lambda ignored:
5060 self.shouldHTTPError("GET dir-0share-json",
5061 410, "Gone", "UnrecoverableFileError",
5063 self.fileurls["dir-0share-json"]))
5064 def _check_unrecoverable_file(body):
5065 self.failIfIn("<html>", body)
5066 body = " ".join(body.strip().split())
5067 exp = ("UnrecoverableFileError: the directory (or mutable file) "
5068 "could not be retrieved, because there were insufficient "
5069 "good shares. This might indicate that no servers were "
5070 "connected, insufficient servers were connected, the URI "
5071 "was corrupt, or that shares have been lost due to server "
5072 "departure, hard drive failure, or disk corruption. You "
5073 "should perform a filecheck on this object to learn more.")
5074 self.failUnlessReallyEqual(exp, body)
5075 d.addCallback(_check_unrecoverable_file)
5077 d.addCallback(lambda ignored:
5078 self.shouldHTTPError("GET dir-1share-json",
5079 410, "Gone", "UnrecoverableFileError",
5081 self.fileurls["dir-1share-json"]))
5082 d.addCallback(_check_unrecoverable_file)
5084 d.addCallback(lambda ignored:
5085 self.shouldHTTPError("GET imaginary",
5086 404, "Not Found", None,
5087 self.GET, self.fileurls["imaginary"]))
5089 # attach a webapi child that throws a random error, to test how it
5091 w = c0.getServiceNamed("webish")
5092 w.root.putChild("ERRORBOOM", ErrorBoom())
5094 # "Accept: */*" : should get a text/html stack trace
5095 # "Accept: text/plain" : should get a text/plain stack trace
5096 # "Accept: text/plain, application/octet-stream" : text/plain (CLI)
5097 # no Accept header: should get a text/html stack trace
5099 d.addCallback(lambda ignored:
5100 self.shouldHTTPError("GET errorboom_html",
5101 500, "Internal Server Error", None,
5102 self.GET, "ERRORBOOM",
5103 headers={"accept": ["*/*"]}))
5104 def _internal_error_html1(body):
5105 self.failUnlessIn("<html>", "expected HTML, not '%s'" % body)
5106 d.addCallback(_internal_error_html1)
5108 d.addCallback(lambda ignored:
5109 self.shouldHTTPError("GET errorboom_text",
5110 500, "Internal Server Error", None,
5111 self.GET, "ERRORBOOM",
5112 headers={"accept": ["text/plain"]}))
5113 def _internal_error_text2(body):
5114 self.failIfIn("<html>", body)
5115 self.failUnless(body.startswith("Traceback "), body)
5116 d.addCallback(_internal_error_text2)
5118 CLI_accepts = "text/plain, application/octet-stream"
5119 d.addCallback(lambda ignored:
5120 self.shouldHTTPError("GET errorboom_text",
5121 500, "Internal Server Error", None,
5122 self.GET, "ERRORBOOM",
5123 headers={"accept": [CLI_accepts]}))
5124 def _internal_error_text3(body):
5125 self.failIfIn("<html>", body)
5126 self.failUnless(body.startswith("Traceback "), body)
5127 d.addCallback(_internal_error_text3)
5129 d.addCallback(lambda ignored:
5130 self.shouldHTTPError("GET errorboom_text",
5131 500, "Internal Server Error", None,
5132 self.GET, "ERRORBOOM"))
5133 def _internal_error_html4(body):
5134 self.failUnlessIn("<html>", body)
5135 d.addCallback(_internal_error_html4)
5137 def _flush_errors(res):
5138 # Trial: please ignore the CompletelyUnhandledError in the logs
5139 self.flushLoggedErrors(CompletelyUnhandledError)
5141 d.addBoth(_flush_errors)
5145 def test_blacklist(self):
5146 # download from a blacklisted URI, get an error
5147 self.basedir = "web/Grid/blacklist"
5149 c0 = self.g.clients[0]
5150 c0_basedir = c0.basedir
5151 fn = os.path.join(c0_basedir, "access.blacklist")
5153 DATA = "off-limits " * 50
5155 d = c0.upload(upload.Data(DATA, convergence=""))
5156 def _stash_uri_and_create_dir(ur):
5158 self.url = "uri/"+self.uri
5159 u = uri.from_string_filenode(self.uri)
5160 self.si = u.get_storage_index()
5161 childnode = c0.create_node_from_uri(self.uri, None)
5162 return c0.create_dirnode({u"blacklisted.txt": (childnode,{}) })
5163 d.addCallback(_stash_uri_and_create_dir)
5164 def _stash_dir(node):
5165 self.dir_node = node
5166 self.dir_uri = node.get_uri()
5167 self.dir_url = "uri/"+self.dir_uri
5168 d.addCallback(_stash_dir)
5169 d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
5170 def _check_dir_html(body):
5171 self.failUnlessIn("<html>", body)
5172 self.failUnlessIn("blacklisted.txt</a>", body)
5173 d.addCallback(_check_dir_html)
5174 d.addCallback(lambda ign: self.GET(self.url))
5175 d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
5177 def _blacklist(ign):
5179 f.write(" # this is a comment\n")
5181 f.write("\n") # also exercise blank lines
5182 f.write("%s %s\n" % (base32.b2a(self.si), "off-limits to you"))
5184 # clients should be checking the blacklist each time, so we don't
5185 # need to restart the client
5186 d.addCallback(_blacklist)
5187 d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_uri",
5189 "Access Prohibited: off-limits",
5190 self.GET, self.url))
5192 # We should still be able to list the parent directory, in HTML...
5193 d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
5194 def _check_dir_html2(body):
5195 self.failUnlessIn("<html>", body)
5196 self.failUnlessIn("blacklisted.txt</strike>", body)
5197 d.addCallback(_check_dir_html2)
5199 # ... and in JSON (used by CLI).
5200 d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True))
5201 def _check_dir_json(res):
5202 data = simplejson.loads(res)
5203 self.failUnless(isinstance(data, list), data)
5204 self.failUnlessEqual(data[0], "dirnode")
5205 self.failUnless(isinstance(data[1], dict), data)
5206 self.failUnlessIn("children", data[1])
5207 self.failUnlessIn("blacklisted.txt", data[1]["children"])
5208 childdata = data[1]["children"]["blacklisted.txt"]
5209 self.failUnless(isinstance(childdata, list), data)
5210 self.failUnlessEqual(childdata[0], "filenode")
5211 self.failUnless(isinstance(childdata[1], dict), data)
5212 d.addCallback(_check_dir_json)
5214 def _unblacklist(ign):
5215 open(fn, "w").close()
5216 # the Blacklist object watches mtime to tell when the file has
5217 # changed, but on windows this test will run faster than the
5218 # filesystem's mtime resolution. So we edit Blacklist.last_mtime
5219 # to force a reload.
5220 self.g.clients[0].blacklist.last_mtime -= 2.0
5221 d.addCallback(_unblacklist)
5223 # now a read should work
5224 d.addCallback(lambda ign: self.GET(self.url))
5225 d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
5227 # read again to exercise the blacklist-is-unchanged logic
5228 d.addCallback(lambda ign: self.GET(self.url))
5229 d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
5231 # now add a blacklisted directory, and make sure files under it are
5234 childnode = c0.create_node_from_uri(self.uri, None)
5235 return c0.create_dirnode({u"child": (childnode,{}) })
5236 d.addCallback(_add_dir)
5237 def _get_dircap(dn):
5238 self.dir_si_b32 = base32.b2a(dn.get_storage_index())
5239 self.dir_url_base = "uri/"+dn.get_write_uri()
5240 self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json"
5241 self.dir_url_json2 = "uri/"+dn.get_write_uri()+"/?t=json"
5242 self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"/?t=json"
5243 self.child_url = "uri/"+dn.get_readonly_uri()+"/child"
5244 d.addCallback(_get_dircap)
5245 d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
5246 d.addCallback(lambda body: self.failUnlessIn("<html>", body))
5247 d.addCallback(lambda ign: self.GET(self.dir_url_json1))
5248 d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
5249 d.addCallback(lambda ign: self.GET(self.dir_url_json2))
5250 d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
5251 d.addCallback(lambda ign: self.GET(self.dir_url_json_ro))
5252 d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
5253 d.addCallback(lambda ign: self.GET(self.child_url))
5254 d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
5256 def _block_dir(ign):
5258 f.write("%s %s\n" % (self.dir_si_b32, "dir-off-limits to you"))
5260 self.g.clients[0].blacklist.last_mtime -= 2.0
5261 d.addCallback(_block_dir)
5262 d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir base",
5264 "Access Prohibited: dir-off-limits",
5265 self.GET, self.dir_url_base))
5266 d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json1",
5268 "Access Prohibited: dir-off-limits",
5269 self.GET, self.dir_url_json1))
5270 d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json2",
5272 "Access Prohibited: dir-off-limits",
5273 self.GET, self.dir_url_json2))
5274 d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json_ro",
5276 "Access Prohibited: dir-off-limits",
5277 self.GET, self.dir_url_json_ro))
5278 d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir child",
5280 "Access Prohibited: dir-off-limits",
5281 self.GET, self.child_url))
class CompletelyUnhandledError(Exception):
    """Marker exception raised by ErrorBoom so Grid.test_exceptions can
    verify how the webapi renders an unexpected server-side error (and so
    trial's log-flushing can target it precisely)."""
class ErrorBoom(rend.Page):
    # Minimal nevow resource that always blows up while rendering; attached
    # at /ERRORBOOM by Grid.test_exceptions to provoke a 500 response.
    def beforeRender(self, ctx):
        # nevow calls beforeRender before producing any output, so raising
        # here surfaces as an internal server error for every request.
        raise CompletelyUnhandledError("whoops")