1 import os.path, re, urllib
3 from StringIO import StringIO
4 from twisted.application import service
5 from twisted.trial import unittest
6 from twisted.internet import defer, reactor
7 from twisted.web import client, error, http
8 from twisted.python import failure, log
9 from allmydata import interfaces, uri, webish
10 from allmydata.storage.mutable import MutableShareFile
11 from allmydata.storage.immutable import ShareFile
12 from allmydata.immutable import upload, download
13 from allmydata.web import status, common
14 from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
15 from allmydata.util import fileutil, base32
16 from allmydata.util.assertutil import precondition
17 from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
18 FakeMutableFileNode, create_chk_filenode, WebErrorMixin, ShouldFailMixin
19 from allmydata.interfaces import IURI, INewDirectoryURI, \
20 IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
21 from allmydata.mutable import servermap, publish, retrieve
22 import common_util as testutil
23 from allmydata.test.no_network import GridTestMixin
25 from allmydata.test.common_web import HTTPClientGETFactory, \
28 # create a fake uploader/downloader, and a couple of fake dirnodes, then
29 # create a webserver that works against them
# Minimal stand-in for the introducer client, attached to FakeClient below so
# the web server under test has an object to query for connection info.
# NOTE(review): this SOURCE is a numbered listing with gaps in the embedded
# line numbers; the bodies of these three methods are not visible here, so
# only the interface is documented -- do not assume their return values.
31 class FakeIntroducerClient:
32 def get_all_connectors(self):
34 def get_all_connections_for(self, service_name):
36 def get_all_peerids(self):
# Stand-in stats provider: exposes an empty stats/counters structure so the
# web status pages have something to render.
# NOTE(review): a line is missing between the class header and the 'stats'
# attribute in this listing (embedded numbering jumps 39 -> 41) -- presumably
# a get_stats() accessor; confirm against the full file.
39 class FakeStatsProvider:
41 stats = {'stats': {}, 'counters': {}}
# Fake Tahoe client node. Provides just enough of the real client's surface
# (node identity, introducer client, canned upload/download/mutable status
# objects, node-creation and upload entry points) for webish.WebishServer to
# run against in the tests below.
# NOTE(review): this listing has gaps (embedded line numbers jump); several
# method bodies are only partially visible.
44 class FakeClient(service.MultiService):
45 nodeid = "fake_nodeid"
46 nickname = "fake_nickname"
47 basedir = "fake_basedir"
# Version info rendered on the welcome page; body truncated in this listing.
48 def get_versions(self):
49 return {'allmydata': "fake",
54 introducer_furl = "None"
55 introducer_client = FakeIntroducerClient()
# One pre-made status object of each kind, so the /status pages and the
# list_all_*_statuses() accessors below have something to show.
56 _all_upload_status = [upload.UploadStatus()]
57 _all_download_status = [download.DownloadStatus()]
58 _all_mapupdate_statuses = [servermap.UpdateStatus()]
59 _all_publish_statuses = [publish.PublishStatus()]
60 _all_retrieve_statuses = [retrieve.RetrieveStatus()]
61 convergence = "some random string"
62 stats_provider = FakeStatsProvider()
# Bodies of the next three queries are not visible in this listing.
64 def connected_to_introducer(self):
67 def get_nickname_for_peerid(self, peerid):
70 def get_permuted_peers(self, service_name, key):
# Dispatch a URI string to the matching fake node class: directory caps to
# FakeDirectoryNode, file caps to FakeCHKFileNode, otherwise it must be a
# mutable-file cap (asserted) and becomes a FakeMutableFileNode.
73 def create_node_from_uri(self, auri):
74 precondition(isinstance(auri, str), auri)
75 u = uri.from_string(auri)
76 if (INewDirectoryURI.providedBy(u)
77 or IReadonlyNewDirectoryURI.providedBy(u)):
78 return FakeDirectoryNode(self).init_from_uri(u)
79 if IFileURI.providedBy(u):
80 return FakeCHKFileNode(u, self)
81 assert IMutableFileURI.providedBy(u), u
82 return FakeMutableFileNode(self).init_from_uri(u)
# Create a fresh fake directory node; fires the (partially visible) deferred
# chain with the node itself.
84 def create_empty_dirnode(self):
85 n = FakeDirectoryNode(self)
87 d.addCallback(lambda res: n)
90 MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT
# Create a fake mutable file holding 'contents'.
91 def create_mutable_file(self, contents=""):
92 n = FakeMutableFileNode(self)
93 return n.create(contents)
# Fake upload: read the uploadable's full contents, stash them in a fake CHK
# filenode, and return UploadResults carrying the resulting URI.
95 def upload(self, uploadable):
96 d = uploadable.get_size()
97 d.addCallback(lambda size: uploadable.read(size))
100 n = create_chk_filenode(self, data)
101 results = upload.UploadResults()
102 results.uri = n.get_uri()
104 d.addCallback(_got_data)
# Accessors for the canned status lists declared above; the /status web
# pages iterate these.
107 def list_all_upload_statuses(self):
108 return self._all_upload_status
109 def list_all_download_statuses(self):
110 return self._all_download_status
111 def list_all_mapupdate_statuses(self):
112 return self._all_mapupdate_statuses
113 def list_all_publish_statuses(self):
114 return self._all_publish_statuses
115 def list_all_retrieve_statuses(self):
116 return self._all_retrieve_statuses
117 def list_all_helper_statuses(self):
# Test fixture mixin: starts a FakeClient with a WebishServer listening on an
# ephemeral port, builds a small directory tree (public/foo/bar.txt, empty,
# sub/baz.txt, blockingfile, a unicode-named entry, plus a read-only
# 'reedownlee' directory), and provides HTTP helper methods (GET/HEAD/PUT/
# DELETE/POST) and failure-assertion helpers used by the test cases below.
# NOTE(review): this listing has gaps; notably the 'def setUp(self):' line
# itself is missing (numbering jumps 120 -> 122), as are parts of several
# deferred callbacks -- confirm against the full file before editing.
120 class WebMixin(object):
122 self.s = FakeClient()
123 self.s.startService()
124 self.staticdir = self.mktemp()
# "0" asks for an ephemeral port; the actual port is read back below.
125 self.ws = s = webish.WebishServer(self.s, "0", staticdir=self.staticdir)
126 s.setServiceParent(self.s)
127 self.webish_port = port = s.listener._port.getHost().port
128 self.webish_url = "http://localhost:%d" % port
# Six fake directories: public root, private root, foo, empty, sub, rodir.
130 l = [ self.s.create_empty_dirnode() for x in range(6) ]
131 d = defer.DeferredList(l)
133 self.public_root = res[0][1]
134 assert interfaces.IDirectoryNode.providedBy(self.public_root), res
135 self.public_url = "/uri/" + self.public_root.get_uri()
136 self.private_root = res[1][1]
140 self._foo_uri = foo.get_uri()
141 self._foo_readonly_uri = foo.get_readonly_uri()
142 self._foo_verifycap = foo.get_verify_cap().to_string()
143 # NOTE: we ignore the deferred on all set_uri() calls, because we
144 # know the fake nodes do these synchronously
145 self.public_root.set_uri(u"foo", foo.get_uri())
147 self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
148 foo.set_uri(u"bar.txt", self._bar_txt_uri)
149 self._bar_txt_verifycap = n.get_verify_cap().to_string()
151 foo.set_uri(u"empty", res[3][1].get_uri())
152 sub_uri = res[4][1].get_uri()
153 self._sub_uri = sub_uri
154 foo.set_uri(u"sub", sub_uri)
155 sub = self.s.create_node_from_uri(sub_uri)
# 'blockingfile' is a plain file used to block mkdir-through-a-file tests.
157 _ign, n, blocking_uri = self.makefile(1)
158 foo.set_uri(u"blockingfile", blocking_uri)
160 unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
161 # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
162 # still think of it as an umlaut
163 foo.set_uri(unicode_filename, self._bar_txt_uri)
165 _ign, n, baz_file = self.makefile(2)
166 self._baz_file_uri = baz_file
167 sub.set_uri(u"baz.txt", baz_file)
169 _ign, n, self._bad_file_uri = self.makefile(3)
170 # this uri should not be downloadable
171 del FakeCHKFileNode.all_contents[self._bad_file_uri]
# 'reedownlee' is attached via its read-only cap, for readonly-dir tests.
174 self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri())
175 rodir.set_uri(u"nor", baz_file)
180 # public/foo/blockingfile
183 # public/foo/sub/baz.txt
185 # public/reedownlee/nor
186 self.NEWFILE_CONTENTS = "newfile contents\n"
# Capture bar.txt's metadata for later comparison in failUnlessIsFooJSON.
188 return foo.get_metadata_for(u"bar.txt")
190 def _got_metadata(metadata):
191 self._bar_txt_metadata = metadata
192 d.addCallback(_got_metadata)
# Build a small immutable test file; returns (contents, node, uri).
195 def makefile(self, number):
196 contents = "contents of file %s\n" % number
197 n = create_chk_filenode(self.s, contents)
198 return contents, n, n.get_uri()
# (tearDown -- its 'def' line is missing from this listing)
201 return self.s.stopService()
# Assertion helpers: check a response body against the known fixtures.
203 def failUnlessIsBarDotTxt(self, res):
204 self.failUnlessEqual(res, self.BAR_CONTENTS, res)
# Check the t=json representation of bar.txt: an immutable filenode with the
# expected ro/verify URIs and size, and no rw_uri.
206 def failUnlessIsBarJSON(self, res):
207 data = simplejson.loads(res)
208 self.failUnless(isinstance(data, list))
209 self.failUnlessEqual(data[0], u"filenode")
210 self.failUnless(isinstance(data[1], dict))
211 self.failIf(data[1]["mutable"])
212 self.failIf("rw_uri" in data[1]) # immutable
213 self.failUnlessEqual(data[1]["ro_uri"], self._bar_txt_uri)
214 self.failUnlessEqual(data[1]["verify_uri"], self._bar_txt_verifycap)
215 self.failUnlessEqual(data[1]["size"], len(self.BAR_CONTENTS))
# Check the t=json representation of the 'foo' directory: a mutable dirnode
# with the expected caps and the five known children, with metadata.
217 def failUnlessIsFooJSON(self, res):
218 data = simplejson.loads(res)
219 self.failUnless(isinstance(data, list))
220 self.failUnlessEqual(data[0], "dirnode", res)
221 self.failUnless(isinstance(data[1], dict))
222 self.failUnless(data[1]["mutable"])
223 self.failUnless("rw_uri" in data[1]) # mutable
224 self.failUnlessEqual(data[1]["rw_uri"], self._foo_uri)
225 self.failUnlessEqual(data[1]["ro_uri"], self._foo_readonly_uri)
226 self.failUnlessEqual(data[1]["verify_uri"], self._foo_verifycap)
228 kidnames = sorted([unicode(n) for n in data[1]["children"]])
229 self.failUnlessEqual(kidnames,
230 [u"bar.txt", u"blockingfile", u"empty",
231 u"n\u00fc.txt", u"sub"])
232 kids = dict( [(unicode(name),value)
234 in data[1]["children"].iteritems()] )
235 self.failUnlessEqual(kids[u"sub"][0], "dirnode")
236 self.failUnless("metadata" in kids[u"sub"][1])
237 self.failUnless("ctime" in kids[u"sub"][1]["metadata"])
238 self.failUnless("mtime" in kids[u"sub"][1]["metadata"])
239 self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
240 self.failUnlessEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
241 self.failUnlessEqual(kids[u"bar.txt"][1]["ro_uri"], self._bar_txt_uri)
242 self.failUnlessEqual(kids[u"bar.txt"][1]["verify_uri"],
243 self._bar_txt_verifycap)
244 self.failUnlessEqual(kids[u"bar.txt"][1]["metadata"]["ctime"],
245 self._bar_txt_metadata["ctime"])
246 self.failUnlessEqual(kids[u"n\u00fc.txt"][1]["ro_uri"],
# HTTP helpers: issue requests against the test server on localhost.
249 def GET(self, urlpath, followRedirect=False, return_response=False,
251 # if return_response=True, this fires with (data, statuscode,
252 # respheaders) instead of just data.
253 assert not isinstance(urlpath, unicode)
254 url = self.webish_url + urlpath
255 factory = HTTPClientGETFactory(url, method="GET",
256 followRedirect=followRedirect, **kwargs)
257 reactor.connectTCP("localhost", self.webish_port, factory)
260 return (data, factory.status, factory.response_headers)
262 d.addCallback(_got_data)
263 return factory.deferred
265 def HEAD(self, urlpath, return_response=False, **kwargs):
266 # this requires some surgery, because twisted.web.client doesn't want
267 # to give us back the response headers.
268 factory = HTTPClientHEADFactory(urlpath, method="HEAD", **kwargs)
269 reactor.connectTCP("localhost", self.webish_port, factory)
272 return (data, factory.status, factory.response_headers)
274 d.addCallback(_got_data)
275 return factory.deferred
277 def PUT(self, urlpath, data, **kwargs):
278 url = self.webish_url + urlpath
279 return client.getPage(url, method="PUT", postdata=data, **kwargs)
281 def DELETE(self, urlpath):
282 url = self.webish_url + urlpath
283 return client.getPage(url, method="DELETE")
# POST builds a multipart/form-data body by hand; tuple-valued fields are
# (filename, contents) file uploads, unicode values are sent as UTF-8.
285 def POST(self, urlpath, followRedirect=False, **fields):
286 url = self.webish_url + urlpath
287 sepbase = "boogabooga"
291 form.append('Content-Disposition: form-data; name="_charset"')
295 for name, value in fields.iteritems():
296 if isinstance(value, tuple):
297 filename, value = value
298 form.append('Content-Disposition: form-data; name="%s"; '
299 'filename="%s"' % (name, filename.encode("utf-8")))
301 form.append('Content-Disposition: form-data; name="%s"' % name)
303 if isinstance(value, unicode):
304 value = value.encode("utf-8")
307 assert isinstance(value, str)
311 body = "\r\n".join(form) + "\r\n"
312 headers = {"content-type": "multipart/form-data; boundary=%s" % sepbase,
314 return client.getPage(url, method="POST", postdata=body,
315 headers=headers, followRedirect=followRedirect)
# shouldFail: addBoth-style check that 'res' is a Failure wrapping
# expected_failure, optionally containing the given substrings.
317 def shouldFail(self, res, expected_failure, which,
318 substring=None, response_substring=None):
319 if isinstance(res, failure.Failure):
320 res.trap(expected_failure)
322 self.failUnless(substring in str(res),
323 "substring '%s' not in '%s'"
324 % (substring, str(res)))
325 if response_substring:
326 self.failUnless(response_substring in res.value.response,
327 "response substring '%s' not in '%s'"
328 % (response_substring, res.value.response))
330 self.fail("%s was supposed to raise %s, not get '%s'" %
331 (which, expected_failure, res))
# shouldFail2: same idea, but invokes the callable itself (maybeDeferred)
# and asserts the resulting failure.
333 def shouldFail2(self, expected_failure, which, substring,
335 callable, *args, **kwargs):
336 assert substring is None or isinstance(substring, str)
337 assert response_substring is None or isinstance(response_substring, str)
338 d = defer.maybeDeferred(callable, *args, **kwargs)
340 if isinstance(res, failure.Failure):
341 res.trap(expected_failure)
343 self.failUnless(substring in str(res),
344 "%s: substring '%s' not in '%s'"
345 % (which, substring, str(res)))
346 if response_substring:
347 self.failUnless(response_substring in res.value.response,
348 "%s: response substring '%s' not in '%s'"
350 response_substring, res.value.response))
352 self.fail("%s was supposed to raise %s, not get '%s'" %
353 (which, expected_failure, res))
# Assert that 'res' is a web error.Error with a 404 status.
357 def should404(self, res, which):
358 if isinstance(res, failure.Failure):
359 res.trap(error.Error)
360 self.failUnlessEqual(res.value.status, "404")
362 self.fail("%s was supposed to Error(404), not get '%s'" %
# Assert an error.Error with a specific HTTP status code and substrings.
365 def shouldHTTPError(self, res, which, code=None, substring=None,
366 response_substring=None):
367 if isinstance(res, failure.Failure):
368 res.trap(error.Error)
370 self.failUnlessEqual(res.value.status, str(code))
372 self.failUnless(substring in str(res),
373 "substring '%s' not in '%s'"
374 % (substring, str(res)))
375 if response_substring:
376 self.failUnless(response_substring in res.value.response,
377 "response substring '%s' not in '%s'"
378 % (response_substring, res.value.response))
380 self.fail("%s was supposed to Error(%s), not get '%s'" %
# Convenience wrapper: run the callable and route the result through
# shouldHTTPError above.
383 def shouldHTTPError2(self, which,
384 code=None, substring=None, response_substring=None,
385 callable=None, *args, **kwargs):
386 assert substring is None or isinstance(substring, str)
388 d = defer.maybeDeferred(callable, *args, **kwargs)
389 d.addBoth(self.shouldHTTPError, which,
390 code, substring, response_substring)
# The main web-API test case: exercises the welcome page and the
# /provisioning and /reliability tools. Inherits the fixture tree and HTTP
# helpers from WebMixin.
# NOTE(review): this listing has gaps in the embedded line numbers; several
# test bodies and callback definitions are only partially visible.
394 class Web(WebMixin, testutil.StallMixin, unittest.TestCase):
# Smoke test -- body not visible in this listing.
395 def test_create(self):
# Welcome page should render its banner text; also re-renders with a real
# basedir containing a private/ subdirectory.
398 def test_welcome(self):
401 self.failUnless('Welcome To AllMyData' in res)
402 self.failUnless('Tahoe' in res)
404 self.s.basedir = 'web/test_welcome'
405 fileutil.make_dirs("web/test_welcome")
406 fileutil.make_dirs("web/test_welcome/private")
408 d.addCallback(_check)
# Provisioning tool: GET the form, then POST several parameter sets and
# check the computed share-space figures in the rendered page.
411 def test_provisioning(self):
412 d = self.GET("/provisioning/")
414 self.failUnless('Tahoe Provisioning Tool' in res)
415 fields = {'filled': True,
416 "num_users": int(50e3),
417 "files_per_user": 1000,
418 "space_per_user": int(1e9),
419 "sharing_ratio": 1.0,
420 "encoding_parameters": "3-of-10-5",
422 "ownership_mode": "A",
423 "download_rate": 100,
428 return self.POST("/provisioning/", **fields)
430 d.addCallback(_check)
432 self.failUnless('Tahoe Provisioning Tool' in res)
433 self.failUnless("Share space consumed: 167.01TB" in res)
# Second, much larger parameter set should overflow to "huge!".
435 fields = {'filled': True,
436 "num_users": int(50e6),
437 "files_per_user": 1000,
438 "space_per_user": int(5e9),
439 "sharing_ratio": 1.0,
440 "encoding_parameters": "25-of-100-50",
441 "num_servers": 30000,
442 "ownership_mode": "E",
443 "drive_failure_model": "U",
445 "download_rate": 1000,
450 return self.POST("/provisioning/", **fields)
451 d.addCallback(_check2)
453 self.failUnless("Share space consumed: huge!" in res)
# Minimal POST: defaults should still produce a share-space line.
454 fields = {'filled': True}
455 return self.POST("/provisioning/", **fields)
456 d.addCallback(_check3)
458 self.failUnless("Share space consumed:" in res)
459 d.addCallback(_check4)
# Reliability tool: skipped unless NumPy is importable; POSTs a parameter
# set and checks the computed loss probability in the output.
462 def test_reliability_tool(self):
464 from allmydata import reliability
465 _hush_pyflakes = reliability
467 raise unittest.SkipTest("reliability tool requires NumPy")
469 d = self.GET("/reliability/")
471 self.failUnless('Tahoe Reliability Tool' in res)
472 fields = {'drive_lifetime': "8Y",
477 "check_period": "1M",
478 "report_period": "3M",
481 return self.POST("/reliability/", **fields)
483 d.addCallback(_check)
485 self.failUnless('Tahoe Reliability Tool' in res)
486 r = r'Probability of loss \(no maintenance\):\s+<span>0.033591'
487 self.failUnless(re.search(r, res), res)
488 d.addCallback(_check2)
# /status pages: the overview should list one entry of each operation kind
# (using the counters from FakeClient's canned status objects), the JSON
# form should parse, and each per-operation page should render its title.
491 def test_status(self):
492 dl_num = self.s.list_all_download_statuses()[0].get_counter()
493 ul_num = self.s.list_all_upload_statuses()[0].get_counter()
494 mu_num = self.s.list_all_mapupdate_statuses()[0].get_counter()
495 pub_num = self.s.list_all_publish_statuses()[0].get_counter()
496 ret_num = self.s.list_all_retrieve_statuses()[0].get_counter()
497 d = self.GET("/status", followRedirect=True)
499 self.failUnless('Upload and Download Status' in res, res)
500 self.failUnless('"down-%d"' % dl_num in res, res)
501 self.failUnless('"up-%d"' % ul_num in res, res)
502 self.failUnless('"mapupdate-%d"' % mu_num in res, res)
503 self.failUnless('"publish-%d"' % pub_num in res, res)
504 self.failUnless('"retrieve-%d"' % ret_num in res, res)
505 d.addCallback(_check)
506 d.addCallback(lambda res: self.GET("/status/?t=json"))
507 def _check_json(res):
508 data = simplejson.loads(res)
509 self.failUnless(isinstance(data, dict))
510 active = data["active"]
511 # TODO: test more. We need a way to fake an active operation
513 d.addCallback(_check_json)
515 d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
517 self.failUnless("File Download Status" in res, res)
518 d.addCallback(_check_dl)
519 d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
521 self.failUnless("File Upload Status" in res, res)
522 d.addCallback(_check_ul)
523 d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
524 def _check_mapupdate(res):
525 self.failUnless("Mutable File Servermap Update Status" in res, res)
526 d.addCallback(_check_mapupdate)
527 d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
528 def _check_publish(res):
529 self.failUnless("Mutable File Publish Status" in res, res)
530 d.addCallback(_check_publish)
531 d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
532 def _check_retrieve(res):
533 self.failUnless("Mutable File Retrieve Status" in res, res)
534 d.addCallback(_check_retrieve)
# Unit-test the time/rate formatting helpers used by the download and
# upload results renderers (seconds/ms/us, MBps/kBps/Bps).
538 def test_status_numbers(self):
539 drrm = status.DownloadResultsRendererMixin()
540 self.failUnlessEqual(drrm.render_time(None, None), "")
541 self.failUnlessEqual(drrm.render_time(None, 2.5), "2.50s")
542 self.failUnlessEqual(drrm.render_time(None, 0.25), "250ms")
543 self.failUnlessEqual(drrm.render_time(None, 0.0021), "2.1ms")
544 self.failUnlessEqual(drrm.render_time(None, 0.000123), "123us")
545 self.failUnlessEqual(drrm.render_rate(None, None), "")
546 self.failUnlessEqual(drrm.render_rate(None, 2500000), "2.50MBps")
547 self.failUnlessEqual(drrm.render_rate(None, 30100), "30.1kBps")
548 self.failUnlessEqual(drrm.render_rate(None, 123), "123Bps")
550 urrm = status.UploadResultsRendererMixin()
551 self.failUnlessEqual(urrm.render_time(None, None), "")
552 self.failUnlessEqual(urrm.render_time(None, 2.5), "2.50s")
553 self.failUnlessEqual(urrm.render_time(None, 0.25), "250ms")
554 self.failUnlessEqual(urrm.render_time(None, 0.0021), "2.1ms")
555 self.failUnlessEqual(urrm.render_time(None, 0.000123), "123us")
556 self.failUnlessEqual(urrm.render_rate(None, None), "")
557 self.failUnlessEqual(urrm.render_rate(None, 2500000), "2.50MBps")
558 self.failUnlessEqual(urrm.render_rate(None, 30100), "30.1kBps")
559 self.failUnlessEqual(urrm.render_rate(None, 123), "123Bps")
# GET/HEAD of a file URL, including HTTP Range handling: closed ranges
# ("bytes=1-10") and open-ended ranges ("bytes=5-") must return 206 with a
# matching Content-Range header; malformed range headers must fail.
561 def test_GET_FILEURL(self):
562 d = self.GET(self.public_url + "/foo/bar.txt")
563 d.addCallback(self.failUnlessIsBarDotTxt)
566 def test_GET_FILEURL_range(self):
567 headers = {"range": "bytes=1-10"}
568 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
569 return_response=True)
570 def _got((res, status, headers)):
571 self.failUnlessEqual(int(status), 206)
572 self.failUnless(headers.has_key("content-range"))
573 self.failUnlessEqual(headers["content-range"][0],
574 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
575 self.failUnlessEqual(res, self.BAR_CONTENTS[1:11])
# Open-ended range: server must fill in the final byte index itself.
579 def test_GET_FILEURL_partial_range(self):
580 headers = {"range": "bytes=5-"}
581 length = len(self.BAR_CONTENTS)
582 d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
583 return_response=True)
584 def _got((res, status, headers)):
585 self.failUnlessEqual(int(status), 206)
586 self.failUnless(headers.has_key("content-range"))
587 self.failUnlessEqual(headers["content-range"][0],
588 "bytes 5-%d/%d" % (length-1, length))
589 self.failUnlessEqual(res, self.BAR_CONTENTS[5:])
# HEAD with a range: same status/headers as GET, but an empty body.
593 def test_HEAD_FILEURL_range(self):
594 headers = {"range": "bytes=1-10"}
595 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
596 return_response=True)
597 def _got((res, status, headers)):
598 self.failUnlessEqual(res, "")
599 self.failUnlessEqual(int(status), 206)
600 self.failUnless(headers.has_key("content-range"))
601 self.failUnlessEqual(headers["content-range"][0],
602 "bytes 1-10/%d" % len(self.BAR_CONTENTS))
606 def test_HEAD_FILEURL_partial_range(self):
607 headers = {"range": "bytes=5-"}
608 length = len(self.BAR_CONTENTS)
609 d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
610 return_response=True)
611 def _got((res, status, headers)):
612 self.failUnlessEqual(int(status), 206)
613 self.failUnless(headers.has_key("content-range"))
614 self.failUnlessEqual(headers["content-range"][0],
615 "bytes 5-%d/%d" % (length-1, length))
# A syntactically invalid Range header must be rejected.
619 def test_GET_FILEURL_range_bad(self):
620 headers = {"range": "BOGUS=fizbop-quarnak"}
621 d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_bad",
623 "Syntactically invalid http range header",
624 self.GET, self.public_url + "/foo/bar.txt",
# Plain HEAD: empty body with correct content-length and content-type.
628 def test_HEAD_FILEURL(self):
629 d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
630 def _got((res, status, headers)):
631 self.failUnlessEqual(res, "")
632 self.failUnlessEqual(headers["content-length"][0],
633 str(len(self.BAR_CONTENTS)))
634 self.failUnlessEqual(headers["content-type"], ["text/plain"])
# /file/$CAP and /named/$CAP URL forms: trailing name components are
# cosmetic (for save-as filenames), non-GET methods and non-file caps are
# rejected, and /uri/$FILECAP serves the file directly.
638 def test_GET_FILEURL_named(self):
639 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
640 base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
641 d = self.GET(base + "/@@name=/blah.txt")
642 d.addCallback(self.failUnlessIsBarDotTxt)
643 d.addCallback(lambda res: self.GET(base + "/blah.txt"))
644 d.addCallback(self.failUnlessIsBarDotTxt)
645 d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
646 d.addCallback(self.failUnlessIsBarDotTxt)
647 d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
648 d.addCallback(self.failUnlessIsBarDotTxt)
649 save_url = base + "?save=true&filename=blah.txt"
650 d.addCallback(lambda res: self.GET(save_url))
651 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
652 u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
653 u_fn_e = urllib.quote(u_filename.encode("utf-8"))
654 u_url = base + "?save=true&filename=" + u_fn_e
655 d.addCallback(lambda res: self.GET(u_url))
656 d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
# PUT to /file/... is not allowed.
659 def test_PUT_FILEURL_named_bad(self):
660 base = "/file/%s" % urllib.quote(self._bar_txt_uri)
661 d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
663 "/file can only be used with GET or HEAD",
664 self.PUT, base + "/@@name=/blah.txt", "")
# /file/ with a directory cap is an error.
667 def test_GET_DIRURL_named_bad(self):
668 base = "/file/%s" % urllib.quote(self._foo_uri)
669 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
672 self.GET, base + "/@@name=/blah.txt")
# Bare "/file" without a cap is an error.
675 def test_GET_slash_file_bad(self):
676 d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
678 "/file must be followed by a file-cap and a name",
# Verify-caps cannot be turned into nodes, so both /file/$VERIFYCAP and
# /uri/$VERIFYCAP must be rejected.
682 def test_GET_unhandled_URI_named(self):
683 contents, n, newuri = self.makefile(12)
684 verifier_cap = n.get_verify_cap().to_string()
685 base = "/file/%s" % urllib.quote(verifier_cap)
686 # client.create_node_from_uri() can't handle verify-caps
687 d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
689 "is not a valid file- or directory- cap",
693 def test_GET_unhandled_URI(self):
694 contents, n, newuri = self.makefile(12)
695 verifier_cap = n.get_verify_cap().to_string()
696 base = "/uri/%s" % urllib.quote(verifier_cap)
697 # client.create_node_from_uri() can't handle verify-caps
698 d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
700 "is not a valid file- or directory- cap",
704 def test_GET_FILE_URI(self):
705 base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
707 d.addCallback(self.failUnlessIsBarDotTxt)
# A file cap has no children: child paths under it must 400.
710 def test_GET_FILE_URI_badchild(self):
711 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
712 errmsg = "Files have no children, certainly not named 'boguschild'"
713 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
714 "400 Bad Request", errmsg,
718 def test_PUT_FILE_URI_badchild(self):
719 base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
720 errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
721 d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
722 "400 Bad Request", errmsg,
# save=true should still deliver the file contents.
726 def test_GET_FILEURL_save(self):
727 d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true")
728 # TODO: look at the headers, expect a Content-Disposition: attachment
730 d.addCallback(self.failUnlessIsBarDotTxt)
733 def test_GET_FILEURL_missing(self):
734 d = self.GET(self.public_url + "/foo/missing")
735 d.addBoth(self.should404, "test_GET_FILEURL_missing")
# PUT of new files (immutable and mutable), replacement semantics,
# intermediate-directory creation, and DELETE of files.
738 def test_PUT_NEWFILEURL(self):
739 d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
740 # TODO: we lose the response code, so we can't check this
741 #self.failUnlessEqual(responsecode, 201)
742 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
743 d.addCallback(lambda res:
744 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
745 self.NEWFILE_CONTENTS))
# Content-Range on PUT is unsupported: expect 501 and no child created.
748 def test_PUT_NEWFILEURL_range_bad(self):
749 headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
750 target = self.public_url + "/foo/new.txt"
751 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
752 "501 Not Implemented",
753 "Content-Range in PUT not yet supported",
754 # (and certainly not for immutable files)
755 self.PUT, target, self.NEWFILE_CONTENTS[1:11],
757 d.addCallback(lambda res:
758 self.failIfNodeHasChild(self._foo_node, u"new.txt"))
# mutable=true: the response body is a writable mutable-file cap.
761 def test_PUT_NEWFILEURL_mutable(self):
762 d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
763 self.NEWFILE_CONTENTS)
764 # TODO: we lose the response code, so we can't check this
765 #self.failUnlessEqual(responsecode, 201)
767 u = uri.from_string_mutable_filenode(res)
768 self.failUnless(u.is_mutable())
769 self.failIf(u.is_readonly())
771 d.addCallback(_check_uri)
772 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
773 d.addCallback(lambda res:
774 self.failUnlessMutableChildContentsAre(self._foo_node,
776 self.NEWFILE_CONTENTS))
# Oversized mutable PUT: one byte over MUTABLE_SIZELIMIT must give 413.
779 def test_PUT_NEWFILEURL_mutable_toobig(self):
780 d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_mutable_toobig",
781 "413 Request Entity Too Large",
782 "SDMF is limited to one segment, and 10001 > 10000",
784 self.public_url + "/foo/new.txt?mutable=true",
785 "b" * (self.s.MUTABLE_SIZELIMIT+1))
# PUT over an existing child replaces it by default.
788 def test_PUT_NEWFILEURL_replace(self):
789 d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
790 # TODO: we lose the response code, so we can't check this
791 #self.failUnlessEqual(responsecode, 200)
792 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
793 d.addCallback(lambda res:
794 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
795 self.NEWFILE_CONTENTS))
# Unknown t= value on a file PUT must 400.
798 def test_PUT_NEWFILEURL_bad_t(self):
799 d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
800 "PUT to a file: bad t=bogus",
801 self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
# replace=false on an existing child must be refused.
805 def test_PUT_NEWFILEURL_no_replace(self):
806 d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
807 self.NEWFILE_CONTENTS)
808 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
810 "There was already a child by that name, and you asked me "
# PUT through a nonexistent intermediate directory creates it.
814 def test_PUT_NEWFILEURL_mkdirs(self):
815 d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
817 d.addCallback(self.failUnlessURIMatchesChild, fn, u"newdir/new.txt")
818 d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
819 d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
820 d.addCallback(lambda res:
821 self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
822 self.NEWFILE_CONTENTS))
# ...but a file sitting where the directory should go is an error.
825 def test_PUT_NEWFILEURL_blocked(self):
826 d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
827 self.NEWFILE_CONTENTS)
828 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
830 "Unable to create directory 'blockingfile': a file was in the way")
# DELETE removes the child; missing targets (file or whole path) 404.
833 def test_DELETE_FILEURL(self):
834 d = self.DELETE(self.public_url + "/foo/bar.txt")
835 d.addCallback(lambda res:
836 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
839 def test_DELETE_FILEURL_missing(self):
840 d = self.DELETE(self.public_url + "/foo/missing")
841 d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
844 def test_DELETE_FILEURL_missing2(self):
845 d = self.DELETE(self.public_url + "/missing/missing")
846 d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
# t=json / t=uri / t=readonly-uri views of files, and the HTML rendering of
# directory pages (relative links, delete forms, readonly annotations).
849 def test_GET_FILEURL_json(self):
850 # twisted.web.http.parse_qs ignores any query args without an '=', so
851 # I can't do "GET /path?json", I have to do "GET /path/t=json"
852 # instead. This may make it tricky to emulate the S3 interface
854 d = self.GET(self.public_url + "/foo/bar.txt?t=json")
855 d.addCallback(self.failUnlessIsBarJSON)
858 def test_GET_FILEURL_json_missing(self):
859 d = self.GET(self.public_url + "/foo/missing?json")
860 d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
863 def test_GET_FILEURL_uri(self):
864 d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
866 self.failUnlessEqual(res, self._bar_txt_uri)
867 d.addCallback(_check)
868 d.addCallback(lambda res:
869 self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
871 # for now, for files, uris and readonly-uris are the same
872 self.failUnlessEqual(res, self._bar_txt_uri)
873 d.addCallback(_check2)
876 def test_GET_FILEURL_badtype(self):
877 d = self.shouldHTTPError2("GET t=bogus", 400, "Bad Request",
880 self.public_url + "/foo/bar.txt?t=bogus")
883 def test_GET_FILEURL_uri_missing(self):
884 d = self.GET(self.public_url + "/foo/missing?t=uri")
885 d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
# Directory HTML page: checks the welcome-page link, the file row for
# bar.txt (name, size, named-file URL), the relative delete-form action,
# and the DIR/DIR-RO rows for 'sub' and 'reedownlee'.
888 def test_GET_DIRURL(self):
889 # the addSlash means we get a redirect here
890 # from /uri/$URI/foo/ , we need ../../../ to get back to the root
892 d = self.GET(self.public_url + "/foo", followRedirect=True)
894 self.failUnless(('<a href="%s">Return to Welcome page' % ROOT)
896 # the FILE reference points to a URI, but it should end in bar.txt
897 bar_url = ("%s/file/%s/@@named=/bar.txt" %
898 (ROOT, urllib.quote(self._bar_txt_uri)))
899 get_bar = "".join([r'<td>',
900 r'<a href="%s">bar.txt</a>' % bar_url,
903 r'\s+<td>%d</td>' % len(self.BAR_CONTENTS),
905 self.failUnless(re.search(get_bar, res), res)
906 for line in res.split("\n"):
907 # find the line that contains the delete button for bar.txt
908 if ("form action" in line and
909 'value="delete"' in line and
910 'value="bar.txt"' in line):
911 # the form target should use a relative URL
912 foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
913 self.failUnless(('action="%s"' % foo_url) in line, line)
914 # and the when_done= should too
915 #done_url = urllib.quote(???)
916 #self.failUnless(('name="when_done" value="%s"' % done_url)
920 self.fail("unable to find delete-bar.txt line", res)
922 # the DIR reference just points to a URI
923 sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
924 get_sub = ((r'<td><a href="%s">sub</a></td>' % sub_url)
925 + r'\s+<td>DIR</td>')
926 self.failUnless(re.search(get_sub, res), res)
927 d.addCallback(_check)
929 # look at a directory which is readonly
930 d.addCallback(lambda res:
931 self.GET(self.public_url + "/reedownlee", followRedirect=True))
933 self.failUnless("(readonly)" in res, res)
934 self.failIf("Upload a file" in res, res)
935 d.addCallback(_check2)
937 # and at a directory that contains a readonly directory
938 d.addCallback(lambda res:
939 self.GET(self.public_url, followRedirect=True))
941 self.failUnless(re.search(r'<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a>'
942 '</td>\s+<td>DIR-RO</td>', res))
943 d.addCallback(_check3)
947 def test_GET_DIRURL_badtype(self):
948 d = self.shouldHTTPError2("test_GET_DIRURL_badtype",
952 self.public_url + "/foo?t=bogus")
955 def test_GET_DIRURL_json(self):
956 d = self.GET(self.public_url + "/foo?t=json")
957 d.addCallback(self.failUnlessIsFooJSON)
# Slow deep-traversal operations (t=start-manifest, t=start-deep-size)
# require an ophandle= argument; with one, the manifest can be polled at
# /operations/<handle> and fetched as HTML, text, or JSON.
961 def test_POST_DIRURL_manifest_no_ophandle(self):
962 d = self.shouldFail2(error.Error,
963 "test_POST_DIRURL_manifest_no_ophandle",
965 "slow operation requires ophandle=",
966 self.POST, self.POST, self.public_url, t="start-manifest")
969 def test_POST_DIRURL_manifest(self):
970 d = defer.succeed(None)
# Helper: start a manifest operation under handle "125", wait for it to
# finish, then fetch its results in the requested output format.
971 def getman(ignored, output):
972 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=125",
974 d.addCallback(self.wait_for_operation, "125")
975 d.addCallback(self.get_operation_results, "125", output)
977 d.addCallback(getman, None)
978 def _got_html(manifest):
979 self.failUnless("Manifest of SI=" in manifest)
980 self.failUnless("<td>sub</td>" in manifest)
981 self.failUnless(self._sub_uri in manifest)
982 self.failUnless("<td>sub/baz.txt</td>" in manifest)
983 d.addCallback(_got_html)
985 # both t=status and unadorned GET should be identical
986 d.addCallback(lambda res: self.GET("/operations/125"))
987 d.addCallback(_got_html)
989 d.addCallback(getman, "html")
990 d.addCallback(_got_html)
991 d.addCallback(getman, "text")
992 def _got_text(manifest):
993 self.failUnless("\nsub " + self._sub_uri + "\n" in manifest)
994 self.failUnless("\nsub/baz.txt URI:CHK:" in manifest)
995 d.addCallback(_got_text)
996 d.addCallback(getman, "JSON")
998 data = res["manifest"]
1000 for (path_list, cap) in data:
1001 got[tuple(path_list)] = cap
1002 self.failUnlessEqual(got[(u"sub",)], self._sub_uri)
1003 self.failUnless((u"sub",u"baz.txt") in got)
1004 self.failUnless("finished" in res)
1005 self.failUnless("origin" in res)
1006 self.failUnless("storage-index" in res)
1007 self.failUnless("verifycaps" in res)
1008 self.failUnless("stats" in res)
1009 d.addCallback(_got_json)
1012 def test_POST_DIRURL_deepsize_no_ophandle(self):
1013 d = self.shouldFail2(error.Error,
1014 "test_POST_DIRURL_deepsize_no_ophandle",
1016 "slow operation requires ophandle=",
1017 self.POST, self.public_url, t="start-deep-size")
1020 def test_POST_DIRURL_deepsize(self):
1021 d = self.POST(self.public_url + "/foo/?t=start-deep-size&ophandle=126",
1022 followRedirect=True)
1023 d.addCallback(self.wait_for_operation, "126")
1024 d.addCallback(self.get_operation_results, "126", "json")
1025 def _got_json(data):
1026 self.failUnlessEqual(data["finished"], True)
1028 self.failUnless(size > 1000)
1029 d.addCallback(_got_json)
1030 d.addCallback(self.get_operation_results, "126", "text")
1032 mo = re.search(r'^size: (\d+)$', res, re.M)
1033 self.failUnless(mo, res)
1034 size = int(mo.group(1))
1035 # with directories, the size varies.
1036 self.failUnless(size > 1000)
1037 d.addCallback(_got_text)
1040 def test_POST_DIRURL_deepstats_no_ophandle(self):
1041 d = self.shouldFail2(error.Error,
1042 "test_POST_DIRURL_deepstats_no_ophandle",
1044 "slow operation requires ophandle=",
1045 self.POST, self.public_url, t="start-deep-stats")
    def test_POST_DIRURL_deepstats(self):
        """Run t=start-deep-stats and verify the aggregate statistics."""
        # NOTE(review): several lines appear to have been lost in extraction:
        # at least one entry of the 'expected' dict (probably "count-files"),
        # the dict's closing brace, the value arguments of the failUnlessEqual
        # message, the expected size-files-histogram value, and 'return d'.
        # Restore from upstream before relying on this test.
        d = self.POST(self.public_url + "/foo/?t=start-deep-stats&ophandle=127",
                      followRedirect=True)
        d.addCallback(self.wait_for_operation, "127")
        d.addCallback(self.get_operation_results, "127", "json")
        def _got_json(stats):
            expected = {"count-immutable-files": 3,
                        "count-mutable-files": 0,
                        "count-literal-files": 0,
                        "count-directories": 3,
                        "size-immutable-files": 57,
                        "size-literal-files": 0,
                        #"size-directories": 1912, # varies
                        #"largest-directory": 1590,
                        "largest-directory-children": 5,
                        "largest-immutable-file": 19,
            for k,v in expected.iteritems():
                self.failUnlessEqual(stats[k], v,
                                     "stats[%s] was %s, not %s" %
            self.failUnlessEqual(stats["size-files-histogram"],
        d.addCallback(_got_json)
1075 def test_POST_DIRURL_stream_manifest(self):
1076 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
1078 self.failUnless(res.endswith("\n"))
1079 units = [simplejson.loads(t) for t in res[:-1].split("\n")]
1080 self.failUnlessEqual(len(units), 7)
1081 self.failUnlessEqual(units[-1]["type"], "stats")
1083 self.failUnlessEqual(first["path"], [])
1084 self.failUnlessEqual(first["cap"], self._foo_uri)
1085 self.failUnlessEqual(first["type"], "directory")
1086 baz = [u for u in units[:-1] if u["cap"] == self._baz_file_uri][0]
1087 self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
1088 self.failIfEqual(baz["storage-index"], None)
1089 self.failIfEqual(baz["verifycap"], None)
1090 self.failIfEqual(baz["repaircap"], None)
1092 d.addCallback(_check)
1095 def test_GET_DIRURL_uri(self):
1096 d = self.GET(self.public_url + "/foo?t=uri")
1098 self.failUnlessEqual(res, self._foo_uri)
1099 d.addCallback(_check)
1102 def test_GET_DIRURL_readonly_uri(self):
1103 d = self.GET(self.public_url + "/foo?t=readonly-uri")
1105 self.failUnlessEqual(res, self._foo_readonly_uri)
1106 d.addCallback(_check)
1109 def test_PUT_NEWDIRURL(self):
1110 d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
1111 d.addCallback(lambda res:
1112 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1113 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1114 d.addCallback(self.failUnlessNodeKeysAre, [])
1117 def test_PUT_NEWDIRURL_exists(self):
1118 d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
1119 d.addCallback(lambda res:
1120 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1121 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1122 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1125 def test_PUT_NEWDIRURL_blocked(self):
1126 d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
1127 "409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
1129 self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
1130 d.addCallback(lambda res:
1131 self.failUnlessNodeHasChild(self._foo_node, u"sub"))
1132 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1133 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
    def test_PUT_NEWDIRURL_mkdir_p(self):
        """t=mkdir-p&path=... creates intermediate directories as needed."""
        # NOTE(review): the request that actually exercises mkdir-p (a
        # PUT/POST of 'url'), a second request whose result is compared as
        # 'uri2', and several 'return d' lines appear to have been lost in
        # extraction -- restore from upstream before running.
        d = defer.succeed(None)
        d.addCallback(lambda res: self.POST(self.public_url + "/foo", t='mkdir', name='mkp'))
        d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"mkp"))
        d.addCallback(lambda res: self._foo_node.get(u"mkp"))
        def mkdir_p(mkpnode):
            url = '/uri/%s?t=mkdir-p&path=/sub1/sub2' % urllib.quote(mkpnode.get_uri())
            def made_subsub(ssuri):
                d = self._foo_node.get_child_at_path(u"mkp/sub1/sub2")
                d.addCallback(lambda ssnode: self.failUnlessEqual(ssnode.get_uri(), ssuri))
                d.addCallback(lambda uri2: self.failUnlessEqual(uri2, ssuri))
            d.addCallback(made_subsub)
        d.addCallback(mkdir_p)
1155 def test_PUT_NEWDIRURL_mkdirs(self):
1156 d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
1157 d.addCallback(lambda res:
1158 self.failIfNodeHasChild(self._foo_node, u"newdir"))
1159 d.addCallback(lambda res:
1160 self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
1161 d.addCallback(lambda res:
1162 self._foo_node.get_child_at_path(u"subdir/newdir"))
1163 d.addCallback(self.failUnlessNodeKeysAre, [])
1166 def test_DELETE_DIRURL(self):
1167 d = self.DELETE(self.public_url + "/foo")
1168 d.addCallback(lambda res:
1169 self.failIfNodeHasChild(self.public_root, u"foo"))
1172 def test_DELETE_DIRURL_missing(self):
1173 d = self.DELETE(self.public_url + "/foo/missing")
1174 d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
1175 d.addCallback(lambda res:
1176 self.failUnlessNodeHasChild(self.public_root, u"foo"))
1179 def test_DELETE_DIRURL_missing2(self):
1180 d = self.DELETE(self.public_url + "/missing")
1181 d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
    def dump_root(self):
        """Debug helper: walk the public root, visiting every child node."""
        # NOTE(review): the visitor body (originally print statements) and the
        # trailing 'return d' appear to have been lost in extraction.
        w = webish.DirnodeWalkerMixin()
        def visitor(childpath, childnode, metadata):
        d = w.walk(self.public_root, visitor)
1192 def failUnlessNodeKeysAre(self, node, expected_keys):
1193 for k in expected_keys:
1194 assert isinstance(k, unicode)
1196 def _check(children):
1197 self.failUnlessEqual(sorted(children.keys()), sorted(expected_keys))
1198 d.addCallback(_check)
1200 def failUnlessNodeHasChild(self, node, name):
1201 assert isinstance(name, unicode)
1203 def _check(children):
1204 self.failUnless(name in children)
1205 d.addCallback(_check)
1207 def failIfNodeHasChild(self, node, name):
1208 assert isinstance(name, unicode)
1210 def _check(children):
1211 self.failIf(name in children)
1212 d.addCallback(_check)
1215 def failUnlessChildContentsAre(self, node, name, expected_contents):
1216 assert isinstance(name, unicode)
1217 d = node.get_child_at_path(name)
1218 d.addCallback(lambda node: node.download_to_data())
1219 def _check(contents):
1220 self.failUnlessEqual(contents, expected_contents)
1221 d.addCallback(_check)
1224 def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
1225 assert isinstance(name, unicode)
1226 d = node.get_child_at_path(name)
1227 d.addCallback(lambda node: node.download_best_version())
1228 def _check(contents):
1229 self.failUnlessEqual(contents, expected_contents)
1230 d.addCallback(_check)
1233 def failUnlessChildURIIs(self, node, name, expected_uri):
1234 assert isinstance(name, unicode)
1235 d = node.get_child_at_path(name)
1237 self.failUnlessEqual(child.get_uri(), expected_uri.strip())
1238 d.addCallback(_check)
1241 def failUnlessURIMatchesChild(self, got_uri, node, name):
1242 assert isinstance(name, unicode)
1243 d = node.get_child_at_path(name)
1245 self.failUnlessEqual(got_uri.strip(), child.get_uri())
1246 d.addCallback(_check)
1249 def failUnlessCHKURIHasContents(self, got_uri, contents):
1250 self.failUnless(FakeCHKFileNode.all_contents[got_uri] == contents)
1252 def test_POST_upload(self):
1253 d = self.POST(self.public_url + "/foo", t="upload",
1254 file=("new.txt", self.NEWFILE_CONTENTS))
1256 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1257 d.addCallback(lambda res:
1258 self.failUnlessChildContentsAre(fn, u"new.txt",
1259 self.NEWFILE_CONTENTS))
    def test_POST_upload_unicode(self):
        """Upload a file whose name contains non-ASCII characters."""
        # NOTE(review): 'fn = self._foo_node' and the tail of the final
        # failUnlessEqual call (its message argument, closing parens, and
        # 'return d') appear to have been lost in extraction.
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=(filename, self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
    def test_POST_upload_unicode_named(self):
        """Upload with an explicit unicode name= overriding the filename."""
        # NOTE(review): the 'name=filename,' argument to POST,
        # 'fn = self._foo_node', and the tail of the final failUnlessEqual
        # call appear to have been lost in extraction.
        filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
        d = self.POST(self.public_url + "/foo", t="upload",
                      file=("overridden", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, filename)
        d.addCallback(lambda res:
                      self.failUnlessChildContentsAre(fn, filename,
                                                      self.NEWFILE_CONTENTS))
        target_url = self.public_url + "/foo/" + filename.encode("utf-8")
        d.addCallback(lambda res: self.GET(target_url))
        d.addCallback(lambda contents: self.failUnlessEqual(contents,
                                                            self.NEWFILE_CONTENTS,
1295 def test_POST_upload_no_link(self):
1296 d = self.POST("/uri", t="upload",
1297 file=("new.txt", self.NEWFILE_CONTENTS))
1298 def _check_upload_results(page):
1299 # this should be a page which describes the results of the upload
1300 # that just finished.
1301 self.failUnless("Upload Results:" in page)
1302 self.failUnless("URI:" in page)
1303 uri_re = re.compile("URI: <tt><span>(.*)</span>")
1304 mo = uri_re.search(page)
1305 self.failUnless(mo, page)
1306 new_uri = mo.group(1)
1308 d.addCallback(_check_upload_results)
1309 d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
1312 def test_POST_upload_no_link_whendone(self):
1313 d = self.POST("/uri", t="upload", when_done="/",
1314 file=("new.txt", self.NEWFILE_CONTENTS))
1315 d.addBoth(self.shouldRedirect, "/")
1318 def shouldRedirect2(self, which, checker, callable, *args, **kwargs):
1319 d = defer.maybeDeferred(callable, *args, **kwargs)
1321 if isinstance(res, failure.Failure):
1322 res.trap(error.PageRedirect)
1323 statuscode = res.value.status
1324 target = res.value.location
1325 return checker(statuscode, target)
1326 self.fail("%s: callable was supposed to redirect, not return '%s'"
1331 def test_POST_upload_no_link_whendone_results(self):
1332 def check(statuscode, target):
1333 self.failUnlessEqual(statuscode, str(http.FOUND))
1334 self.failUnless(target.startswith(self.webish_url), target)
1335 return client.getPage(target, method="GET")
1336 d = self.shouldRedirect2("test_POST_upload_no_link_whendone_results",
1338 self.POST, "/uri", t="upload",
1339 when_done="/uri/%(uri)s",
1340 file=("new.txt", self.NEWFILE_CONTENTS))
1341 d.addCallback(lambda res:
1342 self.failUnlessEqual(res, self.NEWFILE_CONTENTS))
    def test_POST_upload_no_link_mutable(self):
        """t=upload&mutable=true to /uri creates an unlinked mutable file,
        retrievable via /uri/URI and /file/URI."""
        # NOTE(review): the binding of 'u' (presumably parsing new_uri into a
        # URI object), the 'def _check2/_check3/_check4(data):' headers, and
        # 'return d' appear to have been lost in extraction.
        d = self.POST("/uri", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        def _check(new_uri):
            new_uri = new_uri.strip()
            self.new_uri = new_uri
            self.failUnless(IMutableFileURI.providedBy(u))
            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
            n = self.s.create_node_from_uri(new_uri)
            return n.download_best_version()
        d.addCallback(_check)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/uri/%s" % urllib.quote(self.new_uri))
        d.addCallback(_check2)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
            return self.GET("/file/%s" % urllib.quote(self.new_uri))
        d.addCallback(_check3)
            self.failUnlessEqual(data, self.NEWFILE_CONTENTS)
        d.addCallback(_check4)
1370 def test_POST_upload_no_link_mutable_toobig(self):
1371 d = self.shouldFail2(error.Error,
1372 "test_POST_upload_no_link_mutable_toobig",
1373 "413 Request Entity Too Large",
1374 "SDMF is limited to one segment, and 10001 > 10000",
1376 "/uri", t="upload", mutable="true",
1378 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
    def test_POST_upload_mutable(self):
        """Exercise the full lifecycle of a mutable file created via POST:
        create, re-upload (same URI), overwrite via PUT, directory listing,
        JSON views, t=uri/t=readonly-uri, /uri/URI access, HEAD sizing, and
        an over-limit overwrite error."""
        # NOTE(review): multiple lines appear to have been lost in extraction:
        # 'fn = self._foo_node', several 'def _got...(newnode):' headers,
        # 'mutable="true"' on the re-upload POST, parts of the lambda in
        # _check_page_json, the self.POST callable argument in the final
        # shouldFail2, and 'return d'. Restore from upstream before running.
        # this creates a mutable file
        d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
                      file=("new.txt", self.NEWFILE_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
                                                             self.NEWFILE_CONTENTS))
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self._mutable_node = newnode
            self._mutable_uri = newnode.get_uri()
        # now upload it again and make sure that the URI doesn't change
        NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
        d.addCallback(lambda res:
                      self.POST(self.public_url + "/foo", t="upload",
                                file=("new.txt", NEWER_CONTENTS)))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got2)
        # upload a second time, using PUT instead of POST
        NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
        d.addCallback(lambda res:
                      self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
        d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
        d.addCallback(lambda res:
                      self.failUnlessMutableChildContentsAre(fn, u"new.txt",
        # finally list the directory, since mutable files are displayed
        # slightly differently
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/",
                               followRedirect=True))
        def _check_page(res):
            # TODO: assert more about the contents
            self.failUnless("SSK" in res)
        d.addCallback(_check_page)
        d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
            self.failUnless(IMutableFileNode.providedBy(newnode))
            self.failUnless(newnode.is_mutable())
            self.failIf(newnode.is_readonly())
            self.failUnlessEqual(self._mutable_uri, newnode.get_uri())
        d.addCallback(_got3)
        # look at the JSON form of the enclosing directory
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/?t=json",
                               followRedirect=True))
        def _check_page_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "dirnode")
            children = dict( [(unicode(name),value)
                              in parsed[1]["children"].iteritems()] )
            self.failUnless("new.txt" in children)
            new_json = children["new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessEqual(new_json[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(new_json[1]["ro_uri"], ro_uri)
        d.addCallback(_check_page_json)
        # and the JSON form of the file
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=json"))
        def _check_file_json(res):
            parsed = simplejson.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessEqual(parsed[1]["rw_uri"], self._mutable_uri)
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(parsed[1]["ro_uri"], ro_uri)
        d.addCallback(_check_file_json)
        # and look at t=uri and t=readonly-uri
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=uri"))
        d.addCallback(lambda res: self.failUnlessEqual(res, self._mutable_uri))
        d.addCallback(lambda res:
                      self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
        def _check_ro_uri(res):
            ro_uri = unicode(self._mutable_node.get_readonly().to_string())
            self.failUnlessEqual(res, ro_uri)
        d.addCallback(_check_ro_uri)
        # make sure we can get to it from /uri/URI
        d.addCallback(lambda res:
                      self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
        d.addCallback(lambda res:
                      self.failUnlessEqual(res, NEW2_CONTENTS))
        # and that HEAD computes the size correctly
        d.addCallback(lambda res:
                      self.HEAD(self.public_url + "/foo/new.txt",
                                return_response=True))
        def _got_headers((res, status, headers)):
            self.failUnlessEqual(res, "")
            self.failUnlessEqual(headers["content-length"][0],
                                 str(len(NEW2_CONTENTS)))
            self.failUnlessEqual(headers["content-type"], ["text/plain"])
        d.addCallback(_got_headers)
        # make sure that size errors are displayed correctly for overwrite
        d.addCallback(lambda res:
                      self.shouldFail2(error.Error,
                                       "test_POST_upload_mutable-toobig",
                                       "413 Request Entity Too Large",
                                       "SDMF is limited to one segment, and 10001 > 10000",
                                       self.public_url + "/foo", t="upload",
                                       "b" * (self.s.MUTABLE_SIZELIMIT+1)),
        d.addErrback(self.dump_error)
1521 def test_POST_upload_mutable_toobig(self):
1522 d = self.shouldFail2(error.Error,
1523 "test_POST_upload_no_link_mutable_toobig",
1524 "413 Request Entity Too Large",
1525 "SDMF is limited to one segment, and 10001 > 10000",
1527 self.public_url + "/foo",
1528 t="upload", mutable="true",
1530 "b" * (self.s.MUTABLE_SIZELIMIT+1)) )
1533 def dump_error(self, f):
1534 # if the web server returns an error code (like 400 Bad Request),
1535 # web.client.getPage puts the HTTP response body into the .response
1536 # attribute of the exception object that it gives back. It does not
1537 # appear in the Failure's repr(), so the ERROR that trial displays
1538 # will be rather terse and unhelpful. addErrback this method to the
1539 # end of your chain to get more information out of these errors.
1540 if f.check(error.Error):
1541 print "web.error.Error:"
1543 print f.value.response
1546 def test_POST_upload_replace(self):
1547 d = self.POST(self.public_url + "/foo", t="upload",
1548 file=("bar.txt", self.NEWFILE_CONTENTS))
1550 d.addCallback(self.failUnlessURIMatchesChild, fn, u"bar.txt")
1551 d.addCallback(lambda res:
1552 self.failUnlessChildContentsAre(fn, u"bar.txt",
1553 self.NEWFILE_CONTENTS))
1556 def test_POST_upload_no_replace_ok(self):
1557 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1558 file=("new.txt", self.NEWFILE_CONTENTS))
1559 d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
1560 d.addCallback(lambda res: self.failUnlessEqual(res,
1561 self.NEWFILE_CONTENTS))
1564 def test_POST_upload_no_replace_queryarg(self):
1565 d = self.POST(self.public_url + "/foo?replace=false", t="upload",
1566 file=("bar.txt", self.NEWFILE_CONTENTS))
1567 d.addBoth(self.shouldFail, error.Error,
1568 "POST_upload_no_replace_queryarg",
1570 "There was already a child by that name, and you asked me "
1571 "to not replace it")
1572 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1573 d.addCallback(self.failUnlessIsBarDotTxt)
1576 def test_POST_upload_no_replace_field(self):
1577 d = self.POST(self.public_url + "/foo", t="upload", replace="false",
1578 file=("bar.txt", self.NEWFILE_CONTENTS))
1579 d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
1581 "There was already a child by that name, and you asked me "
1582 "to not replace it")
1583 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
1584 d.addCallback(self.failUnlessIsBarDotTxt)
1587 def test_POST_upload_whendone(self):
1588 d = self.POST(self.public_url + "/foo", t="upload", when_done="/THERE",
1589 file=("new.txt", self.NEWFILE_CONTENTS))
1590 d.addBoth(self.shouldRedirect, "/THERE")
1592 d.addCallback(lambda res:
1593 self.failUnlessChildContentsAre(fn, u"new.txt",
1594 self.NEWFILE_CONTENTS))
1597 def test_POST_upload_named(self):
1599 d = self.POST(self.public_url + "/foo", t="upload",
1600 name="new.txt", file=self.NEWFILE_CONTENTS)
1601 d.addCallback(self.failUnlessURIMatchesChild, fn, u"new.txt")
1602 d.addCallback(lambda res:
1603 self.failUnlessChildContentsAre(fn, u"new.txt",
1604 self.NEWFILE_CONTENTS))
    def test_POST_upload_named_badfilename(self):
        """A name= containing a slash must be rejected and add nothing."""
        # NOTE(review): the expected status argument (presumably
        # "400 Bad Request") and the tail of the expected-children list
        # (plus 'return d') appear to have been lost in extraction.
        d = self.POST(self.public_url + "/foo", t="upload",
                      name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
        d.addBoth(self.shouldFail, error.Error,
                  "test_POST_upload_named_badfilename",
                  "name= may not contain a slash",
        # make sure that nothing was added
        d.addCallback(lambda res:
                      self.failUnlessNodeKeysAre(self._foo_node,
                                                 [u"bar.txt", u"blockingfile",
                                                  u"empty", u"n\u00fc.txt",
    def test_POST_FILEURL_check(self):
        """t=check on a file: html results, when_done redirect, return_to
        link, and JSON output."""
        # NOTE(review): the 'def _check(res):' and 'def _check3(res):'
        # headers, part of the shouldRedirect2 argument list (the _check2
        # checker, self.POST, bar_url, t="check"), and 'return d' appear to
        # have been lost in extraction.
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_FILEURL_check_and_repair(self):
        """t=check&repair=true on a file: html results, when_done redirect,
        and return_to link."""
        # NOTE(review): the 'def _check(res):' and 'def _check3(res):'
        # headers, part of the shouldRedirect2 argument list (the _check2
        # checker, self.POST, bar_url), and 'return d' appear to have been
        # lost in extraction.
        bar_url = self.public_url + "/foo/bar.txt"
        d = self.POST(bar_url, t="check", repair="true")
            self.failUnless("Healthy :" in res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_FILEURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(bar_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def test_POST_DIRURL_check(self):
        """t=check on a directory: html results, when_done redirect,
        return_to link, and JSON output."""
        # NOTE(review): the 'def _check(res):' and 'def _check3(res):'
        # headers, part of the shouldRedirect2 argument list, and 'return d'
        # appear to have been lost in extraction.
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res, res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", output="JSON"))
        def _check_json(res):
            data = simplejson.loads(res)
            self.failUnless("storage-index" in data)
            self.failUnless(data["results"]["healthy"])
        d.addCallback(_check_json)
    def test_POST_DIRURL_check_and_repair(self):
        """t=check&repair=true on a directory: html results, when_done
        redirect, and return_to link."""
        # NOTE(review): the 'def _check(res):' and 'def _check3(res):'
        # headers, part of the shouldRedirect2 argument list, and 'return d'
        # appear to have been lost in extraction.
        foo_url = self.public_url + "/foo/"
        d = self.POST(foo_url, t="check", repair="true")
            self.failUnless("Healthy :" in res, res)
        d.addCallback(_check)
        redir_url = "http://allmydata.org/TARGET"
        def _check2(statuscode, target):
            self.failUnlessEqual(statuscode, str(http.FOUND))
            self.failUnlessEqual(target, redir_url)
        d.addCallback(lambda res:
                      self.shouldRedirect2("test_POST_DIRURL_check_and_repair",
                                           t="check", repair="true",
                                           when_done=redir_url))
        d.addCallback(lambda res:
                      self.POST(foo_url, t="check", return_to=redir_url))
            self.failUnless("Healthy :" in res)
            self.failUnless("Return to parent directory" in res)
            self.failUnless(redir_url in res)
        d.addCallback(_check3)
    def wait_for_operation(self, ignored, ophandle):
        """Poll /operations/<ophandle> until it reports finished; fires with
        the decoded JSON status of the completed operation."""
        # NOTE(review): the GET that fetches the status, the callback header
        # wrapping the body below, the returns for the finished/unfinished
        # branches, and the final 'return d' appear to have been lost in
        # extraction.
        url = "/operations/" + ophandle
        url += "?t=status&output=JSON"
            data = simplejson.loads(res)
            if not data["finished"]:
                # not done yet: stall one second, then poll again
                d = self.stall(delay=1.0)
                d.addCallback(self.wait_for_operation, ophandle)
    def get_operation_results(self, ignored, ophandle, output=None):
        """Fetch the results of a finished operation; if output is "json",
        fire with the decoded object, otherwise the raw body."""
        # NOTE(review): the '?t=status' query fragment, the 'if output:'
        # guard, the GET itself, and the callback scaffolding appear to have
        # been lost in extraction.
        url = "/operations/" + ophandle
            url += "&output=" + output
            if output and output.lower() == "json":
                return simplejson.loads(res)
1768 def test_POST_DIRURL_deepcheck_no_ophandle(self):
1769 d = self.shouldFail2(error.Error,
1770 "test_POST_DIRURL_deepcheck_no_ophandle",
1772 "slow operation requires ophandle=",
1773 self.POST, self.public_url, t="start-deep-check")
1776 def test_POST_DIRURL_deepcheck(self):
1777 def _check_redirect(statuscode, target):
1778 self.failUnlessEqual(statuscode, str(http.FOUND))
1779 self.failUnless(target.endswith("/operations/123"))
1780 d = self.shouldRedirect2("test_POST_DIRURL_deepcheck", _check_redirect,
1781 self.POST, self.public_url,
1782 t="start-deep-check", ophandle="123")
1783 d.addCallback(self.wait_for_operation, "123")
1784 def _check_json(data):
1785 self.failUnlessEqual(data["finished"], True)
1786 self.failUnlessEqual(data["count-objects-checked"], 8)
1787 self.failUnlessEqual(data["count-objects-healthy"], 8)
1788 d.addCallback(_check_json)
1789 d.addCallback(self.get_operation_results, "123", "html")
1790 def _check_html(res):
1791 self.failUnless("Objects Checked: <span>8</span>" in res)
1792 self.failUnless("Objects Healthy: <span>8</span>" in res)
1793 d.addCallback(_check_html)
1795 d.addCallback(lambda res:
1796 self.GET("/operations/123/"))
1797 d.addCallback(_check_html) # should be the same as without the slash
1799 d.addCallback(lambda res:
1800 self.shouldFail2(error.Error, "one", "404 Not Found",
1801 "No detailed results for SI bogus",
1802 self.GET, "/operations/123/bogus"))
1804 foo_si = self._foo_node.get_storage_index()
1805 foo_si_s = base32.b2a(foo_si)
1806 d.addCallback(lambda res:
1807 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
1808 def _check_foo_json(res):
1809 data = simplejson.loads(res)
1810 self.failUnlessEqual(data["storage-index"], foo_si_s)
1811 self.failUnless(data["results"]["healthy"])
1812 d.addCallback(_check_foo_json)
1815 def test_POST_DIRURL_deepcheck_and_repair(self):
1816 d = self.POST(self.public_url, t="start-deep-check", repair="true",
1817 ophandle="124", output="json", followRedirect=True)
1818 d.addCallback(self.wait_for_operation, "124")
1819 def _check_json(data):
1820 self.failUnlessEqual(data["finished"], True)
1821 self.failUnlessEqual(data["count-objects-checked"], 8)
1822 self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 8)
1823 self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0)
1824 self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0)
1825 self.failUnlessEqual(data["count-repairs-attempted"], 0)
1826 self.failUnlessEqual(data["count-repairs-successful"], 0)
1827 self.failUnlessEqual(data["count-repairs-unsuccessful"], 0)
1828 self.failUnlessEqual(data["count-objects-healthy-post-repair"], 8)
1829 self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0)
1830 self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0)
1831 d.addCallback(_check_json)
1832 d.addCallback(self.get_operation_results, "124", "html")
1833 def _check_html(res):
1834 self.failUnless("Objects Checked: <span>8</span>" in res)
1836 self.failUnless("Objects Healthy (before repair): <span>8</span>" in res)
1837 self.failUnless("Objects Unhealthy (before repair): <span>0</span>" in res)
1838 self.failUnless("Corrupt Shares (before repair): <span>0</span>" in res)
1840 self.failUnless("Repairs Attempted: <span>0</span>" in res)
1841 self.failUnless("Repairs Successful: <span>0</span>" in res)
1842 self.failUnless("Repairs Unsuccessful: <span>0</span>" in res)
1844 self.failUnless("Objects Healthy (after repair): <span>8</span>" in res)
1845 self.failUnless("Objects Unhealthy (after repair): <span>0</span>" in res)
1846 self.failUnless("Corrupt Shares (after repair): <span>0</span>" in res)
1847 d.addCallback(_check_html)
1850 def test_POST_FILEURL_bad_t(self):
1851 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1852 "POST to file: bad t=bogus",
1853 self.POST, self.public_url + "/foo/bar.txt",
1857 def test_POST_mkdir(self): # return value?
1858 d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
1859 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1860 d.addCallback(self.failUnlessNodeKeysAre, [])
1863 def test_POST_mkdir_2(self):
1864 d = self.POST(self.public_url + "/foo/newdir?t=mkdir", "")
1865 d.addCallback(lambda res:
1866 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
1867 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1868 d.addCallback(self.failUnlessNodeKeysAre, [])
1871 def test_POST_mkdirs_2(self):
1872 d = self.POST(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
1873 d.addCallback(lambda res:
1874 self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
1875 d.addCallback(lambda res: self._foo_node.get(u"bardir"))
1876 d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
1877 d.addCallback(self.failUnlessNodeKeysAre, [])
1880 def test_POST_mkdir_no_parentdir_noredirect(self):
1881 d = self.POST("/uri?t=mkdir")
1882 def _after_mkdir(res):
1883 uri.NewDirectoryURI.init_from_string(res)
1884 d.addCallback(_after_mkdir)
1887 def test_POST_mkdir_no_parentdir_redirect(self):
1888 d = self.POST("/uri?t=mkdir&redirect_to_result=true")
1889 d.addBoth(self.shouldRedirect, None, statuscode='303')
1890 def _check_target(target):
1891 target = urllib.unquote(target)
1892 self.failUnless(target.startswith("uri/URI:DIR2:"), target)
1893 d.addCallback(_check_target)
1896 def test_POST_noparent_bad(self):
1897 d = self.shouldHTTPError2("POST /uri?t=bogus", 400, "Bad Request",
1898 "/uri accepts only PUT, PUT?t=mkdir, "
1899 "POST?t=upload, and POST?t=mkdir",
1900 self.POST, "/uri?t=bogus")
    def test_welcome_page_mkdir_button(self):
        """Scrape the mkdir form off the welcome page and submit it."""
        # NOTE(review): the GET of the welcome page that creates 'd', the
        # 'formt = mo.group(2)' binding, and 'return d' appear to have been
        # lost in extraction.
        # Fetch the welcome page.
        def _after_get_welcome_page(res):
            MKDIR_BUTTON_RE=re.compile('<form action="([^"]*)" method="post".*<input type="hidden" name="t" value="([^"]*)" /><input type="hidden" name="([^"]*)" value="([^"]*)" /><input type="submit" value="Create Directory!" />', re.I)
            mo = MKDIR_BUTTON_RE.search(res)
            formaction = mo.group(1)
            formaname = mo.group(3)
            formavalue = mo.group(4)
            return (formaction, formt, formaname, formavalue)
        d.addCallback(_after_get_welcome_page)
        def _after_parse_form(res):
            (formaction, formt, formaname, formavalue) = res
            return self.POST("/%s?t=%s&%s=%s" % (formaction, formt, formaname, formavalue))
        d.addCallback(_after_parse_form)
        d.addBoth(self.shouldRedirect, None, statuscode='303')
1922 def test_POST_mkdir_replace(self): # return value?
1923 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
1924 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1925 d.addCallback(self.failUnlessNodeKeysAre, [])
1928 def test_POST_mkdir_no_replace_queryarg(self): # return value?
1929 d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
1930 d.addBoth(self.shouldFail, error.Error,
1931 "POST_mkdir_no_replace_queryarg",
1933 "There was already a child by that name, and you asked me "
1934 "to not replace it")
1935 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1936 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1939 def test_POST_mkdir_no_replace_field(self): # return value?
1940 d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
1942 d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
1944 "There was already a child by that name, and you asked me "
1945 "to not replace it")
1946 d.addCallback(lambda res: self._foo_node.get(u"sub"))
1947 d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
1950 def test_POST_mkdir_whendone_field(self):
1951 d = self.POST(self.public_url + "/foo",
1952 t="mkdir", name="newdir", when_done="/THERE")
1953 d.addBoth(self.shouldRedirect, "/THERE")
1954 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1955 d.addCallback(self.failUnlessNodeKeysAre, [])
1958 def test_POST_mkdir_whendone_queryarg(self):
1959 d = self.POST(self.public_url + "/foo?when_done=/THERE",
1960 t="mkdir", name="newdir")
1961 d.addBoth(self.shouldRedirect, "/THERE")
1962 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
1963 d.addCallback(self.failUnlessNodeKeysAre, [])
1966 def test_POST_bad_t(self):
1967 d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
1968 "POST to a directory with bad t=BOGUS",
1969 self.POST, self.public_url + "/foo", t="BOGUS")
1972 def test_POST_set_children(self):
1973 contents9, n9, newuri9 = self.makefile(9)
1974 contents10, n10, newuri10 = self.makefile(10)
1975 contents11, n11, newuri11 = self.makefile(11)
1978 "atomic_added_1": [ "filenode", { "rw_uri": "%s",
1981 "ctime": 1002777696.7564139,
1982 "mtime": 1002777696.7564139
1985 "atomic_added_2": [ "filenode", { "rw_uri": "%s",
1988 "ctime": 1002777696.7564139,
1989 "mtime": 1002777696.7564139
1992 "atomic_added_3": [ "filenode", { "rw_uri": "%s",
1995 "ctime": 1002777696.7564139,
1996 "mtime": 1002777696.7564139
1999 }""" % (newuri9, newuri10, newuri11)
2001 url = self.webish_url + self.public_url + "/foo" + "?t=set_children"
2003 d = client.getPage(url, method="POST", postdata=reqbody)
2005 self.failUnlessURIMatchesChild(newuri9, self._foo_node, u"atomic_added_1")
2006 self.failUnlessURIMatchesChild(newuri10, self._foo_node, u"atomic_added_2")
2007 self.failUnlessURIMatchesChild(newuri11, self._foo_node, u"atomic_added_3")
2009 d.addCallback(_then)
2010 d.addErrback(self.dump_error)
2013 def test_POST_put_uri(self):
2014 contents, n, newuri = self.makefile(8)
2015 d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
2016 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"new.txt")
2017 d.addCallback(lambda res:
2018 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2022 def test_POST_put_uri_replace(self):
2023 contents, n, newuri = self.makefile(8)
2024 d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
2025 d.addCallback(self.failUnlessURIMatchesChild, self._foo_node, u"bar.txt")
2026 d.addCallback(lambda res:
2027 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2031 def test_POST_put_uri_no_replace_queryarg(self):
2032 contents, n, newuri = self.makefile(8)
2033 d = self.POST(self.public_url + "/foo?replace=false", t="uri",
2034 name="bar.txt", uri=newuri)
2035 d.addBoth(self.shouldFail, error.Error,
2036 "POST_put_uri_no_replace_queryarg",
2038 "There was already a child by that name, and you asked me "
2039 "to not replace it")
2040 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2041 d.addCallback(self.failUnlessIsBarDotTxt)
2044 def test_POST_put_uri_no_replace_field(self):
2045 contents, n, newuri = self.makefile(8)
2046 d = self.POST(self.public_url + "/foo", t="uri", replace="false",
2047 name="bar.txt", uri=newuri)
2048 d.addBoth(self.shouldFail, error.Error,
2049 "POST_put_uri_no_replace_field",
2051 "There was already a child by that name, and you asked me "
2052 "to not replace it")
2053 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2054 d.addCallback(self.failUnlessIsBarDotTxt)
2057 def test_POST_delete(self):
2058 d = self.POST(self.public_url + "/foo", t="delete", name="bar.txt")
2059 d.addCallback(lambda res: self._foo_node.list())
2060 def _check(children):
2061 self.failIf(u"bar.txt" in children)
2062 d.addCallback(_check)
2065 def test_POST_rename_file(self):
2066 d = self.POST(self.public_url + "/foo", t="rename",
2067 from_name="bar.txt", to_name='wibble.txt')
2068 d.addCallback(lambda res:
2069 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2070 d.addCallback(lambda res:
2071 self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
2072 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
2073 d.addCallback(self.failUnlessIsBarDotTxt)
2074 d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
2075 d.addCallback(self.failUnlessIsBarJSON)
2078 def test_POST_rename_file_redundant(self):
2079 d = self.POST(self.public_url + "/foo", t="rename",
2080 from_name="bar.txt", to_name='bar.txt')
2081 d.addCallback(lambda res:
2082 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2083 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
2084 d.addCallback(self.failUnlessIsBarDotTxt)
2085 d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
2086 d.addCallback(self.failUnlessIsBarJSON)
2089 def test_POST_rename_file_replace(self):
2090 # rename a file and replace a directory with it
2091 d = self.POST(self.public_url + "/foo", t="rename",
2092 from_name="bar.txt", to_name='empty')
2093 d.addCallback(lambda res:
2094 self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
2095 d.addCallback(lambda res:
2096 self.failUnlessNodeHasChild(self._foo_node, u"empty"))
2097 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
2098 d.addCallback(self.failUnlessIsBarDotTxt)
2099 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2100 d.addCallback(self.failUnlessIsBarJSON)
2103 def test_POST_rename_file_no_replace_queryarg(self):
2104 # rename a file and replace a directory with it
2105 d = self.POST(self.public_url + "/foo?replace=false", t="rename",
2106 from_name="bar.txt", to_name='empty')
2107 d.addBoth(self.shouldFail, error.Error,
2108 "POST_rename_file_no_replace_queryarg",
2110 "There was already a child by that name, and you asked me "
2111 "to not replace it")
2112 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2113 d.addCallback(self.failUnlessIsEmptyJSON)
2116 def test_POST_rename_file_no_replace_field(self):
2117 # rename a file and replace a directory with it
2118 d = self.POST(self.public_url + "/foo", t="rename", replace="false",
2119 from_name="bar.txt", to_name='empty')
2120 d.addBoth(self.shouldFail, error.Error,
2121 "POST_rename_file_no_replace_field",
2123 "There was already a child by that name, and you asked me "
2124 "to not replace it")
2125 d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
2126 d.addCallback(self.failUnlessIsEmptyJSON)
2129 def failUnlessIsEmptyJSON(self, res):
2130 data = simplejson.loads(res)
2131 self.failUnlessEqual(data[0], "dirnode", data)
2132 self.failUnlessEqual(len(data[1]["children"]), 0)
2134 def test_POST_rename_file_slash_fail(self):
2135 d = self.POST(self.public_url + "/foo", t="rename",
2136 from_name="bar.txt", to_name='kirk/spock.txt')
2137 d.addBoth(self.shouldFail, error.Error,
2138 "test_POST_rename_file_slash_fail",
2140 "to_name= may not contain a slash",
2142 d.addCallback(lambda res:
2143 self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
2146 def test_POST_rename_dir(self):
2147 d = self.POST(self.public_url, t="rename",
2148 from_name="foo", to_name='plunk')
2149 d.addCallback(lambda res:
2150 self.failIfNodeHasChild(self.public_root, u"foo"))
2151 d.addCallback(lambda res:
2152 self.failUnlessNodeHasChild(self.public_root, u"plunk"))
2153 d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
2154 d.addCallback(self.failUnlessIsFooJSON)
2157 def shouldRedirect(self, res, target=None, statuscode=None, which=""):
2158 """ If target is not None then the redirection has to go to target. If
2159 statuscode is not None then the redirection has to be accomplished with
2160 that HTTP status code."""
2161 if not isinstance(res, failure.Failure):
2162 to_where = (target is None) and "somewhere" or ("to " + target)
2163 self.fail("%s: we were expecting to get redirected %s, not get an"
2164 " actual page: %s" % (which, to_where, res))
2165 res.trap(error.PageRedirect)
2166 if statuscode is not None:
2167 self.failUnlessEqual(res.value.status, statuscode,
2168 "%s: not a redirect" % which)
2169 if target is not None:
2170 # the PageRedirect does not seem to capture the uri= query arg
2171 # properly, so we can't check for it.
2172 realtarget = self.webish_url + target
2173 self.failUnlessEqual(res.value.location, realtarget,
2174 "%s: wrong target" % which)
2175 return res.value.location
2177 def test_GET_URI_form(self):
2178 base = "/uri?uri=%s" % self._bar_txt_uri
2179 # this is supposed to give us a redirect to /uri/$URI, plus arguments
2180 targetbase = "/uri/%s" % urllib.quote(self._bar_txt_uri)
2182 d.addBoth(self.shouldRedirect, targetbase)
2183 d.addCallback(lambda res: self.GET(base+"&filename=bar.txt"))
2184 d.addBoth(self.shouldRedirect, targetbase+"?filename=bar.txt")
2185 d.addCallback(lambda res: self.GET(base+"&t=json"))
2186 d.addBoth(self.shouldRedirect, targetbase+"?t=json")
2187 d.addCallback(self.log, "about to get file by uri")
2188 d.addCallback(lambda res: self.GET(base, followRedirect=True))
2189 d.addCallback(self.failUnlessIsBarDotTxt)
2190 d.addCallback(self.log, "got file by uri, about to get dir by uri")
2191 d.addCallback(lambda res: self.GET("/uri?uri=%s&t=json" % self._foo_uri,
2192 followRedirect=True))
2193 d.addCallback(self.failUnlessIsFooJSON)
2194 d.addCallback(self.log, "got dir by uri")
2198 def test_GET_URI_form_bad(self):
2199 d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
2200 "400 Bad Request", "GET /uri requires uri=",
2204 def test_GET_rename_form(self):
2205 d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
2206 followRedirect=True)
2208 self.failUnless('name="when_done" value="."' in res, res)
2209 self.failUnless(re.search(r'name="from_name" value="bar\.txt"', res))
2210 d.addCallback(_check)
2213 def log(self, res, msg):
2214 #print "MSG: %s RES: %s" % (msg, res)
2218 def test_GET_URI_URL(self):
2219 base = "/uri/%s" % self._bar_txt_uri
2221 d.addCallback(self.failUnlessIsBarDotTxt)
2222 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
2223 d.addCallback(self.failUnlessIsBarDotTxt)
2224 d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
2225 d.addCallback(self.failUnlessIsBarDotTxt)
2228 def test_GET_URI_URL_dir(self):
2229 base = "/uri/%s?t=json" % self._foo_uri
2231 d.addCallback(self.failUnlessIsFooJSON)
2234 def test_GET_URI_URL_missing(self):
2235 base = "/uri/%s" % self._bad_file_uri
2237 d.addBoth(self.shouldHTTPError, "test_GET_URI_URL_missing",
2238 http.GONE, response_substring="NotEnoughSharesError")
2239 # TODO: how can we exercise both sides of WebDownloadTarget.fail
2240 # here? we must arrange for a download to fail after target.open()
2241 # has been called, and then inspect the response to see that it is
2242 # shorter than we expected.
2245 def test_PUT_DIRURL_uri(self):
2246 d = self.s.create_empty_dirnode()
2248 new_uri = dn.get_uri()
2249 # replace /foo with a new (empty) directory
2250 d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
2251 d.addCallback(lambda res:
2252 self.failUnlessEqual(res.strip(), new_uri))
2253 d.addCallback(lambda res:
2254 self.failUnlessChildURIIs(self.public_root,
2258 d.addCallback(_made_dir)
2261 def test_PUT_DIRURL_uri_noreplace(self):
2262 d = self.s.create_empty_dirnode()
2264 new_uri = dn.get_uri()
2265 # replace /foo with a new (empty) directory, but ask that
2266 # replace=false, so it should fail
2267 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
2268 "409 Conflict", "There was already a child by that name, and you asked me to not replace it",
2270 self.public_url + "/foo?t=uri&replace=false",
2272 d.addCallback(lambda res:
2273 self.failUnlessChildURIIs(self.public_root,
2277 d.addCallback(_made_dir)
2280 def test_PUT_DIRURL_bad_t(self):
2281 d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
2282 "400 Bad Request", "PUT to a directory",
2283 self.PUT, self.public_url + "/foo?t=BOGUS", "")
2284 d.addCallback(lambda res:
2285 self.failUnlessChildURIIs(self.public_root,
2290 def test_PUT_NEWFILEURL_uri(self):
2291 contents, n, new_uri = self.makefile(8)
2292 d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
2293 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2294 d.addCallback(lambda res:
2295 self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
2299 def test_PUT_NEWFILEURL_uri_replace(self):
2300 contents, n, new_uri = self.makefile(8)
2301 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
2302 d.addCallback(lambda res: self.failUnlessEqual(res.strip(), new_uri))
2303 d.addCallback(lambda res:
2304 self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
2308 def test_PUT_NEWFILEURL_uri_no_replace(self):
2309 contents, n, new_uri = self.makefile(8)
2310 d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
2311 d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_uri_no_replace",
2313 "There was already a child by that name, and you asked me "
2314 "to not replace it")
2317 def test_PUT_NEWFILE_URI(self):
2318 file_contents = "New file contents here\n"
2319 d = self.PUT("/uri", file_contents)
2321 assert isinstance(uri, str), uri
2322 self.failUnless(uri in FakeCHKFileNode.all_contents)
2323 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri],
2325 return self.GET("/uri/%s" % uri)
2326 d.addCallback(_check)
2328 self.failUnlessEqual(res, file_contents)
2329 d.addCallback(_check2)
2332 def test_PUT_NEWFILE_URI_only_PUT(self):
2333 d = self.PUT("/uri?t=bogus", "")
2334 d.addBoth(self.shouldFail, error.Error,
2335 "PUT_NEWFILE_URI_only_PUT",
2337 "/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
2340 def test_PUT_NEWFILE_URI_mutable(self):
2341 file_contents = "New file contents here\n"
2342 d = self.PUT("/uri?mutable=true", file_contents)
2343 def _check_mutable(uri):
2346 self.failUnless(IMutableFileURI.providedBy(u))
2347 self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
2348 n = self.s.create_node_from_uri(uri)
2349 return n.download_best_version()
2350 d.addCallback(_check_mutable)
2351 def _check2_mutable(data):
2352 self.failUnlessEqual(data, file_contents)
2353 d.addCallback(_check2_mutable)
2357 self.failUnless(uri.to_string() in FakeCHKFileNode.all_contents)
2358 self.failUnlessEqual(FakeCHKFileNode.all_contents[uri.to_string()],
2360 return self.GET("/uri/%s" % uri)
2361 d.addCallback(_check)
2363 self.failUnlessEqual(res, file_contents)
2364 d.addCallback(_check2)
2367 def test_PUT_mkdir(self):
2368 d = self.PUT("/uri?t=mkdir", "")
2370 n = self.s.create_node_from_uri(uri.strip())
2371 d2 = self.failUnlessNodeKeysAre(n, [])
2372 d2.addCallback(lambda res:
2373 self.GET("/uri/%s?t=json" % uri))
2375 d.addCallback(_check)
2376 d.addCallback(self.failUnlessIsEmptyJSON)
2379 def test_POST_check(self):
2380 d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
2382 # this returns a string form of the results, which are probably
2383 # None since we're using fake filenodes.
2384 # TODO: verify that the check actually happened, by changing
2385 # FakeCHKFileNode to count how many times .check() has been
2388 d.addCallback(_done)
2391 def test_bad_method(self):
2392 url = self.webish_url + self.public_url + "/foo/bar.txt"
2393 d = self.shouldHTTPError2("test_bad_method",
2394 501, "Not Implemented",
2395 "I don't know how to treat a BOGUS request.",
2396 client.getPage, url, method="BOGUS")
2399 def test_short_url(self):
2400 url = self.webish_url + "/uri"
2401 d = self.shouldHTTPError2("test_short_url", 501, "Not Implemented",
2402 "I don't know how to treat a DELETE request.",
2403 client.getPage, url, method="DELETE")
2406 def test_ophandle_bad(self):
2407 url = self.webish_url + "/operations/bogus?t=status"
2408 d = self.shouldHTTPError2("test_ophandle_bad", 404, "404 Not Found",
2409 "unknown/expired handle 'bogus'",
2410 client.getPage, url)
2413 def test_ophandle_cancel(self):
2414 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=128",
2415 followRedirect=True)
2416 d.addCallback(lambda ignored:
2417 self.GET("/operations/128?t=status&output=JSON"))
2419 data = simplejson.loads(res)
2420 self.failUnless("finished" in data, res)
2421 monitor = self.ws.root.child_operations.handles["128"][0]
2422 d = self.POST("/operations/128?t=cancel&output=JSON")
2424 data = simplejson.loads(res)
2425 self.failUnless("finished" in data, res)
2426 # t=cancel causes the handle to be forgotten
2427 self.failUnless(monitor.is_cancelled())
2428 d.addCallback(_check2)
2430 d.addCallback(_check1)
2431 d.addCallback(lambda ignored:
2432 self.shouldHTTPError2("test_ophandle_cancel",
2433 404, "404 Not Found",
2434 "unknown/expired handle '128'",
2436 "/operations/128?t=status&output=JSON"))
2439 def test_ophandle_retainfor(self):
2440 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60",
2441 followRedirect=True)
2442 d.addCallback(lambda ignored:
2443 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
2445 data = simplejson.loads(res)
2446 self.failUnless("finished" in data, res)
2447 d.addCallback(_check1)
2448 # the retain-for=0 will cause the handle to be expired very soon
2449 d.addCallback(self.stall, 2.0)
2450 d.addCallback(lambda ignored:
2451 self.shouldHTTPError2("test_ophandle_retainfor",
2452 404, "404 Not Found",
2453 "unknown/expired handle '129'",
2455 "/operations/129?t=status&output=JSON"))
2458 def test_ophandle_release_after_complete(self):
2459 d = self.POST(self.public_url + "/foo/?t=start-manifest&ophandle=130",
2460 followRedirect=True)
2461 d.addCallback(self.wait_for_operation, "130")
2462 d.addCallback(lambda ignored:
2463 self.GET("/operations/130?t=status&output=JSON&release-after-complete=true"))
2464 # the release-after-complete=true will cause the handle to be expired
2465 d.addCallback(lambda ignored:
2466 self.shouldHTTPError2("test_ophandle_release_after_complete",
2467 404, "404 Not Found",
2468 "unknown/expired handle '130'",
2470 "/operations/130?t=status&output=JSON"))
2473 def test_incident(self):
2474 d = self.POST("/report_incident", details="eek")
2476 self.failUnless("Thank you for your report!" in res, res)
2477 d.addCallback(_done)
2480 def test_static(self):
2481 webdir = os.path.join(self.staticdir, "subdir")
2482 fileutil.make_dirs(webdir)
2483 f = open(os.path.join(webdir, "hello.txt"), "wb")
2487 d = self.GET("/static/subdir/hello.txt")
2489 self.failUnlessEqual(res, "hello")
2490 d.addCallback(_check)
2494 class Util(unittest.TestCase):
2495 def test_abbreviate_time(self):
2496 self.failUnlessEqual(common.abbreviate_time(None), "")
2497 self.failUnlessEqual(common.abbreviate_time(1.234), "1.23s")
2498 self.failUnlessEqual(common.abbreviate_time(0.123), "123ms")
2499 self.failUnlessEqual(common.abbreviate_time(0.00123), "1.2ms")
2500 self.failUnlessEqual(common.abbreviate_time(0.000123), "123us")
2502 def test_abbreviate_rate(self):
2503 self.failUnlessEqual(common.abbreviate_rate(None), "")
2504 self.failUnlessEqual(common.abbreviate_rate(1234000), "1.23MBps")
2505 self.failUnlessEqual(common.abbreviate_rate(12340), "12.3kBps")
2506 self.failUnlessEqual(common.abbreviate_rate(123), "123Bps")
2508 def test_abbreviate_size(self):
2509 self.failUnlessEqual(common.abbreviate_size(None), "")
2510 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000*1000), "1.23GB")
2511 self.failUnlessEqual(common.abbreviate_size(1.23*1000*1000), "1.23MB")
2512 self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
2513 self.failUnlessEqual(common.abbreviate_size(123), "123B")
2515 def test_plural(self):
2517 return "%d second%s" % (s, status.plural(s))
2518 self.failUnlessEqual(convert(0), "0 seconds")
2519 self.failUnlessEqual(convert(1), "1 second")
2520 self.failUnlessEqual(convert(2), "2 seconds")
2522 return "has share%s: %s" % (status.plural(s), ",".join(s))
2523 self.failUnlessEqual(convert2([]), "has shares: ")
2524 self.failUnlessEqual(convert2(["1"]), "has share: 1")
2525 self.failUnlessEqual(convert2(["1","2"]), "has shares: 1,2")
2528 class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase, ShouldFailMixin):
2530 def CHECK(self, ign, which, args, clientnum=0):
2531 fileurl = self.fileurls[which]
2532 url = fileurl + "?" + args
2533 return self.GET(url, method="POST", clientnum=clientnum)
2535 def test_filecheck(self):
2536 self.basedir = "web/Grid/filecheck"
2538 c0 = self.g.clients[0]
2541 d = c0.upload(upload.Data(DATA, convergence=""))
2542 def _stash_uri(ur, which):
2543 self.uris[which] = ur.uri
2544 d.addCallback(_stash_uri, "good")
2545 d.addCallback(lambda ign:
2546 c0.upload(upload.Data(DATA+"1", convergence="")))
2547 d.addCallback(_stash_uri, "sick")
2548 d.addCallback(lambda ign:
2549 c0.upload(upload.Data(DATA+"2", convergence="")))
2550 d.addCallback(_stash_uri, "dead")
2551 def _stash_mutable_uri(n, which):
2552 self.uris[which] = n.get_uri()
2553 assert isinstance(self.uris[which], str)
2554 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2555 d.addCallback(_stash_mutable_uri, "corrupt")
2556 d.addCallback(lambda ign:
2557 c0.upload(upload.Data("literal", convergence="")))
2558 d.addCallback(_stash_uri, "small")
2560 def _compute_fileurls(ignored):
2562 for which in self.uris:
2563 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2564 d.addCallback(_compute_fileurls)
2566 def _clobber_shares(ignored):
2567 good_shares = self.find_shares(self.uris["good"])
2568 self.failUnlessEqual(len(good_shares), 10)
2569 sick_shares = self.find_shares(self.uris["sick"])
2570 os.unlink(sick_shares[0][2])
2571 dead_shares = self.find_shares(self.uris["dead"])
2572 for i in range(1, 10):
2573 os.unlink(dead_shares[i][2])
2574 c_shares = self.find_shares(self.uris["corrupt"])
2575 cso = CorruptShareOptions()
2576 cso.stdout = StringIO()
2577 cso.parseOptions([c_shares[0][2]])
2579 d.addCallback(_clobber_shares)
2581 d.addCallback(self.CHECK, "good", "t=check")
2582 def _got_html_good(res):
2583 self.failUnless("Healthy" in res, res)
2584 self.failIf("Not Healthy" in res, res)
2585 d.addCallback(_got_html_good)
2586 d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
2587 def _got_html_good_return_to(res):
2588 self.failUnless("Healthy" in res, res)
2589 self.failIf("Not Healthy" in res, res)
2590 self.failUnless('<a href="somewhere">Return to parent directory'
2592 d.addCallback(_got_html_good_return_to)
2593 d.addCallback(self.CHECK, "good", "t=check&output=json")
2594 def _got_json_good(res):
2595 r = simplejson.loads(res)
2596 self.failUnlessEqual(r["summary"], "Healthy")
2597 self.failUnless(r["results"]["healthy"])
2598 self.failIf(r["results"]["needs-rebalancing"])
2599 self.failUnless(r["results"]["recoverable"])
2600 d.addCallback(_got_json_good)
2602 d.addCallback(self.CHECK, "small", "t=check")
2603 def _got_html_small(res):
2604 self.failUnless("Literal files are always healthy" in res, res)
2605 self.failIf("Not Healthy" in res, res)
2606 d.addCallback(_got_html_small)
2607 d.addCallback(self.CHECK, "small", "t=check&return_to=somewhere")
2608 def _got_html_small_return_to(res):
2609 self.failUnless("Literal files are always healthy" in res, res)
2610 self.failIf("Not Healthy" in res, res)
2611 self.failUnless('<a href="somewhere">Return to parent directory'
2613 d.addCallback(_got_html_small_return_to)
2614 d.addCallback(self.CHECK, "small", "t=check&output=json")
2615 def _got_json_small(res):
2616 r = simplejson.loads(res)
2617 self.failUnlessEqual(r["storage-index"], "")
2618 self.failUnless(r["results"]["healthy"])
2619 d.addCallback(_got_json_small)
2621 d.addCallback(self.CHECK, "sick", "t=check")
2622 def _got_html_sick(res):
2623 self.failUnless("Not Healthy" in res, res)
2624 d.addCallback(_got_html_sick)
2625 d.addCallback(self.CHECK, "sick", "t=check&output=json")
2626 def _got_json_sick(res):
2627 r = simplejson.loads(res)
2628 self.failUnlessEqual(r["summary"],
2629 "Not Healthy: 9 shares (enc 3-of-10)")
2630 self.failIf(r["results"]["healthy"])
2631 self.failIf(r["results"]["needs-rebalancing"])
2632 self.failUnless(r["results"]["recoverable"])
2633 d.addCallback(_got_json_sick)
2635 d.addCallback(self.CHECK, "dead", "t=check")
2636 def _got_html_dead(res):
2637 self.failUnless("Not Healthy" in res, res)
2638 d.addCallback(_got_html_dead)
2639 d.addCallback(self.CHECK, "dead", "t=check&output=json")
2640 def _got_json_dead(res):
2641 r = simplejson.loads(res)
2642 self.failUnlessEqual(r["summary"],
2643 "Not Healthy: 1 shares (enc 3-of-10)")
2644 self.failIf(r["results"]["healthy"])
2645 self.failIf(r["results"]["needs-rebalancing"])
2646 self.failIf(r["results"]["recoverable"])
2647 d.addCallback(_got_json_dead)
2649 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true")
2650 def _got_html_corrupt(res):
2651 self.failUnless("Not Healthy! : Unhealthy" in res, res)
2652 d.addCallback(_got_html_corrupt)
2653 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
2654 def _got_json_corrupt(res):
2655 r = simplejson.loads(res)
2656 self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
2658 self.failIf(r["results"]["healthy"])
2659 self.failUnless(r["results"]["recoverable"])
2660 self.failUnlessEqual(r["results"]["count-shares-good"], 9)
2661 self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
2662 d.addCallback(_got_json_corrupt)
2664 d.addErrback(self.explain_web_error)
2667 def test_repair_html(self):
2668 self.basedir = "web/Grid/repair_html"
2670 c0 = self.g.clients[0]
2673 d = c0.upload(upload.Data(DATA, convergence=""))
2674 def _stash_uri(ur, which):
2675 self.uris[which] = ur.uri
2676 d.addCallback(_stash_uri, "good")
2677 d.addCallback(lambda ign:
2678 c0.upload(upload.Data(DATA+"1", convergence="")))
2679 d.addCallback(_stash_uri, "sick")
2680 d.addCallback(lambda ign:
2681 c0.upload(upload.Data(DATA+"2", convergence="")))
2682 d.addCallback(_stash_uri, "dead")
2683 def _stash_mutable_uri(n, which):
2684 self.uris[which] = n.get_uri()
2685 assert isinstance(self.uris[which], str)
2686 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
2687 d.addCallback(_stash_mutable_uri, "corrupt")
2689 def _compute_fileurls(ignored):
2691 for which in self.uris:
2692 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2693 d.addCallback(_compute_fileurls)
2695 def _clobber_shares(ignored):
2696 good_shares = self.find_shares(self.uris["good"])
2697 self.failUnlessEqual(len(good_shares), 10)
2698 sick_shares = self.find_shares(self.uris["sick"])
2699 os.unlink(sick_shares[0][2])
2700 dead_shares = self.find_shares(self.uris["dead"])
2701 for i in range(1, 10):
2702 os.unlink(dead_shares[i][2])
2703 c_shares = self.find_shares(self.uris["corrupt"])
2704 cso = CorruptShareOptions()
2705 cso.stdout = StringIO()
2706 cso.parseOptions([c_shares[0][2]])
2708 d.addCallback(_clobber_shares)
2710 d.addCallback(self.CHECK, "good", "t=check&repair=true")
2711 def _got_html_good(res):
2712 self.failUnless("Healthy" in res, res)
2713 self.failIf("Not Healthy" in res, res)
2714 self.failUnless("No repair necessary" in res, res)
2715 d.addCallback(_got_html_good)
2717 d.addCallback(self.CHECK, "sick", "t=check&repair=true")
2718 def _got_html_sick(res):
2719 self.failUnless("Healthy : healthy" in res, res)
2720 self.failIf("Not Healthy" in res, res)
2721 self.failUnless("Repair successful" in res, res)
2722 d.addCallback(_got_html_sick)
2724 # repair of a dead file will fail, of course, but it isn't yet
2725 # clear how this should be reported. Right now it shows up as
2728 #d.addCallback(self.CHECK, "dead", "t=check&repair=true")
2729 #def _got_html_dead(res):
2731 # self.failUnless("Healthy : healthy" in res, res)
2732 # self.failIf("Not Healthy" in res, res)
2733 # self.failUnless("No repair necessary" in res, res)
2734 #d.addCallback(_got_html_dead)
2736 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&repair=true")
2737 def _got_html_corrupt(res):
2738 self.failUnless("Healthy : Healthy" in res, res)
2739 self.failIf("Not Healthy" in res, res)
2740 self.failUnless("Repair successful" in res, res)
2741 d.addCallback(_got_html_corrupt)
2743 d.addErrback(self.explain_web_error)
2746 def test_repair_json(self):
2747 self.basedir = "web/Grid/repair_json"
2749 c0 = self.g.clients[0]
2752 d = c0.upload(upload.Data(DATA+"1", convergence=""))
2753 def _stash_uri(ur, which):
2754 self.uris[which] = ur.uri
2755 d.addCallback(_stash_uri, "sick")
2757 def _compute_fileurls(ignored):
2759 for which in self.uris:
2760 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
2761 d.addCallback(_compute_fileurls)
2763 def _clobber_shares(ignored):
2764 sick_shares = self.find_shares(self.uris["sick"])
2765 os.unlink(sick_shares[0][2])
2766 d.addCallback(_clobber_shares)
2768 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
2769 def _got_json_sick(res):
2770 r = simplejson.loads(res)
2771 self.failUnlessEqual(r["repair-attempted"], True)
2772 self.failUnlessEqual(r["repair-successful"], True)
2773 self.failUnlessEqual(r["pre-repair-results"]["summary"],
2774 "Not Healthy: 9 shares (enc 3-of-10)")
2775 self.failIf(r["pre-repair-results"]["results"]["healthy"])
2776 self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
2777 self.failUnless(r["post-repair-results"]["results"]["healthy"])
2778 d.addCallback(_got_json_sick)
2780 d.addErrback(self.explain_web_error)
# Deep-check over a directory tree via "t=stream-deep-check": build
# root/{good,small,sick}, damage "sick" (still recoverable), and verify
# the streamed JSON units. Then add root/subdir/grandchild, make subdir
# UNrecoverable, and verify that both t=stream-manifest and
# t=stream-deep-check report the fatal error as an "ERROR:" line instead
# of a JSON unit. (Listing has gaps in its embedded numbering; the
# surviving lines are annotated as-is.)
2783 def test_deep_check(self):
2784 self.basedir = "web/Grid/deep_check"
2786 c0 = self.g.clients[0]
2790 d = c0.create_empty_dirnode()
2791 def _stash_root_and_create_file(n):
2793 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2794 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2795 d.addCallback(_stash_root_and_create_file)
2796 def _stash_uri(fn, which):
2797 self.uris[which] = fn.get_uri()
2799 d.addCallback(_stash_uri, "good")
# "small" is short enough to become a literal (LIT) file
2800 d.addCallback(lambda ign:
2801 self.rootnode.add_file(u"small",
2802 upload.Data("literal",
2804 d.addCallback(_stash_uri, "small")
2805 d.addCallback(lambda ign:
2806 self.rootnode.add_file(u"sick",
2807 upload.Data(DATA+"1",
2809 d.addCallback(_stash_uri, "sick")
# drop two of "sick"'s ten shares: unhealthy but still recoverable
2811 def _clobber_shares(ignored):
2812 self.delete_shares_numbered(self.uris["sick"], [0,1])
2813 d.addCallback(_clobber_shares)
2820 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
# one JSON unit per line: root + 3 children + trailing stats unit
2822 units = [simplejson.loads(line)
2823 for line in res.splitlines()
2825 self.failUnlessEqual(len(units), 4+1)
2826 # should be parent-first
2828 self.failUnlessEqual(u0["path"], [])
2829 self.failUnlessEqual(u0["type"], "directory")
2830 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2831 u0cr = u0["check-results"]
2832 self.failUnlessEqual(u0cr["results"]["count-shares-good"], 10)
2834 ugood = [u for u in units
2835 if u["type"] == "file" and u["path"] == [u"good"]][0]
2836 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2837 ugoodcr = ugood["check-results"]
2838 self.failUnlessEqual(ugoodcr["results"]["count-shares-good"], 10)
# the final unit is the aggregate stats record
2841 self.failUnlessEqual(stats["type"], "stats")
2843 self.failUnlessEqual(s["count-immutable-files"], 2)
2844 self.failUnlessEqual(s["count-literal-files"], 1)
2845 self.failUnlessEqual(s["count-directories"], 1)
2846 d.addCallback(_done)
2848 # now add root/subdir and root/subdir/grandchild, then make subdir
2849 # unrecoverable, then see what happens
2851 d.addCallback(lambda ign:
2852 self.rootnode.create_empty_directory(u"subdir"))
2853 d.addCallback(_stash_uri, "subdir")
2854 d.addCallback(lambda subdir_node:
2855 subdir_node.add_file(u"grandchild",
2856 upload.Data(DATA+"2",
2858 d.addCallback(_stash_uri, "grandchild")
# delete enough of subdir's shares that it cannot be recovered
# (exact share list is on a line missing from this listing)
2860 d.addCallback(lambda ign:
2861 self.delete_shares_numbered(self.uris["subdir"],
2868 # root/subdir [unrecoverable]
2869 # root/subdir/grandchild
2871 # how should a streaming-JSON API indicate fatal error?
2872 # answer: emit ERROR: instead of a JSON string
2874 d.addCallback(self.CHECK, "root", "t=stream-manifest")
2875 def _check_broken_manifest(res):
2876 lines = res.splitlines()
# collect indices of lines flagging the fatal error
2878 for (i,line) in enumerate(lines)
2879 if line.startswith("ERROR:")]
2881 self.fail("no ERROR: in output: %s" % (res,))
2882 first_error = error_lines[0]
2883 error_line = lines[first_error]
2884 error_msg = lines[first_error+1:]
2885 error_msg_s = "\n".join(error_msg) + "\n"
2886 self.failUnlessIn("ERROR: UnrecoverableFileError", error_line)
2887 self.failUnlessIn("no recoverable versions", error_line)
2888 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
# everything before the ERROR: line must still be valid JSON units
2889 units = [simplejson.loads(line) for line in lines[:first_error]]
2890 self.failUnlessEqual(len(units), 5) # includes subdir
2891 last_unit = units[-1]
2892 self.failUnlessEqual(last_unit["path"], ["subdir"])
2893 d.addCallback(_check_broken_manifest)
# same fatal-error behavior expected from the deep-check stream
2895 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
2896 def _check_broken_deepcheck(res):
2897 lines = res.splitlines()
2899 for (i,line) in enumerate(lines)
2900 if line.startswith("ERROR:")]
2902 self.fail("no ERROR: in output: %s" % (res,))
2903 first_error = error_lines[0]
2904 error_line = lines[first_error]
2905 error_msg = lines[first_error+1:]
2906 error_msg_s = "\n".join(error_msg) + "\n"
2907 self.failUnlessIn("ERROR: UnrecoverableFileError", error_line)
2908 self.failUnlessIn("no recoverable versions", error_line)
2909 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
2910 units = [simplejson.loads(line) for line in lines[:first_error]]
2911 self.failUnlessEqual(len(units), 5) # includes subdir
2912 last_unit = units[-1]
2913 self.failUnlessEqual(last_unit["path"], ["subdir"])
# subdir's own check-results confirm it is unrecoverable
2914 r = last_unit["check-results"]["results"]
2915 self.failUnlessEqual(r["count-recoverable-versions"], 0)
2916 self.failUnlessEqual(r["count-shares-good"], 1)
2917 self.failUnlessEqual(r["recoverable"], False)
2918 d.addCallback(_check_broken_deepcheck)
2920 d.addErrback(self.explain_web_error)
# Deep-check WITH repair ("t=stream-deep-check&repair=true") over
# root/{good,small,sick}: only "sick" (one deleted share) should be
# repaired; root and "good" stay untouched. The "dead"/"corrupt" cases
# are commented out in the original. (Listing has gaps in its embedded
# numbering; surviving lines annotated as-is.)
2923 def test_deep_check_and_repair(self):
2924 self.basedir = "web/Grid/deep_check_and_repair"
2926 c0 = self.g.clients[0]
2930 d = c0.create_empty_dirnode()
2931 def _stash_root_and_create_file(n):
2933 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
2934 return n.add_file(u"good", upload.Data(DATA, convergence=""))
2935 d.addCallback(_stash_root_and_create_file)
2936 def _stash_uri(fn, which):
2937 self.uris[which] = fn.get_uri()
2938 d.addCallback(_stash_uri, "good")
2939 d.addCallback(lambda ign:
2940 self.rootnode.add_file(u"small",
2941 upload.Data("literal",
2943 d.addCallback(_stash_uri, "small")
2944 d.addCallback(lambda ign:
2945 self.rootnode.add_file(u"sick",
2946 upload.Data(DATA+"1",
2948 d.addCallback(_stash_uri, "sick")
2949 #d.addCallback(lambda ign:
2950 # self.rootnode.add_file(u"dead",
2951 # upload.Data(DATA+"2",
2953 #d.addCallback(_stash_uri, "dead")
2955 #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
2956 #d.addCallback(lambda fn: self.rootnode.set_node(u"corrupt", fn))
2957 #d.addCallback(_stash_uri, "corrupt")
# sanity-check "good" is fully healthy, then delete one share of "sick"
2959 def _clobber_shares(ignored):
2960 good_shares = self.find_shares(self.uris["good"])
2961 self.failUnlessEqual(len(good_shares), 10)
2962 sick_shares = self.find_shares(self.uris["sick"])
2963 os.unlink(sick_shares[0][2])
2964 #dead_shares = self.find_shares(self.uris["dead"])
2965 #for i in range(1, 10):
2966 # os.unlink(dead_shares[i][2])
2968 #c_shares = self.find_shares(self.uris["corrupt"])
2969 #cso = CorruptShareOptions()
2970 #cso.stdout = StringIO()
2971 #cso.parseOptions([c_shares[0][2]])
2973 d.addCallback(_clobber_shares)
2976 # root/good CHK, 10 shares
2978 # root/sick CHK, 9 shares
2980 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
# one JSON unit per line: root + 3 children + trailing stats unit
2982 units = [simplejson.loads(line)
2983 for line in res.splitlines()
2985 self.failUnlessEqual(len(units), 4+1)
2986 # should be parent-first
2988 self.failUnlessEqual(u0["path"], [])
2989 self.failUnlessEqual(u0["type"], "directory")
2990 self.failUnlessEqual(u0["cap"], self.rootnode.get_uri())
2991 u0crr = u0["check-and-repair-results"]
2992 self.failUnlessEqual(u0crr["repair-attempted"], False)
2993 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
2995 ugood = [u for u in units
2996 if u["type"] == "file" and u["path"] == [u"good"]][0]
2997 self.failUnlessEqual(ugood["cap"], self.uris["good"])
2998 ugoodcrr = ugood["check-and-repair-results"]
# NOTE(review): the next two asserts re-test u0crr (the root's results)
# even though ugoodcrr was just assigned -- looks like a copy-paste
# slip; they presumably should check ugoodcrr instead.
2999 self.failUnlessEqual(u0crr["repair-attempted"], False)
3000 self.failUnlessEqual(u0crr["pre-repair-results"]["results"]["count-shares-good"], 10)
3002 usick = [u for u in units
3003 if u["type"] == "file" and u["path"] == [u"sick"]][0]
3004 self.failUnlessEqual(usick["cap"], self.uris["sick"])
3005 usickcrr = usick["check-and-repair-results"]
# "sick" must have been repaired from 9 shares back up to 10
3006 self.failUnlessEqual(usickcrr["repair-attempted"], True)
3007 self.failUnlessEqual(usickcrr["repair-successful"], True)
3008 self.failUnlessEqual(usickcrr["pre-repair-results"]["results"]["count-shares-good"], 9)
3009 self.failUnlessEqual(usickcrr["post-repair-results"]["results"]["count-shares-good"], 10)
# trailing aggregate stats unit
3012 self.failUnlessEqual(stats["type"], "stats")
3014 self.failUnlessEqual(s["count-immutable-files"], 2)
3015 self.failUnlessEqual(s["count-literal-files"], 1)
3016 self.failUnlessEqual(s["count-directories"], 1)
3017 d.addCallback(_done)
3019 d.addErrback(self.explain_web_error)
# Helper: build a list of (sharefile-path, lease-count) pairs for every
# share of self.uris[which]. Dispatches on the cap-string prefix:
# SSK/DIR2 caps name mutable shares, CHK caps immutable ones; any other
# prefix raises ValueError. The `ignored` parameter lets this slot
# directly into a Deferred.addCallback chain.
3022 def _count_leases(self, ignored, which):
3023 u = self.uris[which]
3024 shares = self.find_shares(u)
3026 for shnum, serverid, fn in shares:
3027 if u.startswith("URI:SSK") or u.startswith("URI:DIR2"):
3028 sf = MutableShareFile(fn)
3029 num_leases = len(sf.debug_get_leases())
3030 elif u.startswith("URI:CHK"):
# (the `sf = ...` assignment for the immutable branch is on a line
# missing from this listing -- presumably ShareFile(fn); verify)
3032 num_leases = len(list(sf.iter_leases()))
3034 raise ValueError("can't count leases on %s" % u)
3035 lease_counts.append( (fn, num_leases) )
def _assert_leasecount(self, lease_counts, expected):
    """Fail the test unless every entry of `lease_counts` (a list of
    (sharefile-path, lease-count) pairs, as produced by _count_leases)
    carries exactly `expected` leases."""
    for share_fn, count in lease_counts:
        if count == expected:
            continue
        self.fail("expected %d leases, have %d, on %s" %
                  (expected, count, share_fn))
# Exercise "t=check&add-lease=true": a plain check adds no lease; a
# check from the ORIGINAL client merely renews its existing lease
# (same lease secrets); a check from a second client (clientnum=1)
# adds a genuinely new lease. Verified for both immutable ("one",
# "two") and mutable caps. (Listing has gaps in its embedded
# numbering; surviving lines annotated as-is.)
3044 def test_add_lease(self):
3045 self.basedir = "web/Grid/add_lease"
3046 self.set_up_grid(num_clients=2)
3047 c0 = self.g.clients[0]
3050 d = c0.upload(upload.Data(DATA, convergence=""))
3051 def _stash_uri(ur, which):
3052 self.uris[which] = ur.uri
3053 d.addCallback(_stash_uri, "one")
3054 d.addCallback(lambda ign:
3055 c0.upload(upload.Data(DATA+"1", convergence="")))
3056 d.addCallback(_stash_uri, "two")
3057 def _stash_mutable_uri(n, which):
3058 self.uris[which] = n.get_uri()
3059 assert isinstance(self.uris[which], str)
3060 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"2"))
3061 d.addCallback(_stash_mutable_uri, "mutable")
3063 def _compute_fileurls(ignored):
3065 for which in self.uris:
3066 self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
3067 d.addCallback(_compute_fileurls)
# baseline: each file starts with exactly one lease (from upload/create)
3069 d.addCallback(self._count_leases, "one")
3070 d.addCallback(self._assert_leasecount, 1)
3071 d.addCallback(self._count_leases, "two")
3072 d.addCallback(self._assert_leasecount, 1)
3073 d.addCallback(self._count_leases, "mutable")
3074 d.addCallback(self._assert_leasecount, 1)
3076 d.addCallback(self.CHECK, "one", "t=check") # no add-lease
3077 def _got_html_good(res):
3078 self.failUnless("Healthy" in res, res)
3079 self.failIf("Not Healthy" in res, res)
3080 d.addCallback(_got_html_good)
# a plain check must not change any lease count
3082 d.addCallback(self._count_leases, "one")
3083 d.addCallback(self._assert_leasecount, 1)
3084 d.addCallback(self._count_leases, "two")
3085 d.addCallback(self._assert_leasecount, 1)
3086 d.addCallback(self._count_leases, "mutable")
3087 d.addCallback(self._assert_leasecount, 1)
3089 # this CHECK uses the original client, which uses the same
3090 # lease-secrets, so it will just renew the original lease
3091 d.addCallback(self.CHECK, "one", "t=check&add-lease=true")
3092 d.addCallback(_got_html_good)
3094 d.addCallback(self._count_leases, "one")
3095 d.addCallback(self._assert_leasecount, 1)
3096 d.addCallback(self._count_leases, "two")
3097 d.addCallback(self._assert_leasecount, 1)
3098 d.addCallback(self._count_leases, "mutable")
3099 d.addCallback(self._assert_leasecount, 1)
3101 # this CHECK uses an alternate client, which adds a second lease
3102 d.addCallback(self.CHECK, "one", "t=check&add-lease=true", clientnum=1)
3103 d.addCallback(_got_html_good)
# only "one" gains a lease; "two" and "mutable" are untouched
3105 d.addCallback(self._count_leases, "one")
3106 d.addCallback(self._assert_leasecount, 2)
3107 d.addCallback(self._count_leases, "two")
3108 d.addCallback(self._assert_leasecount, 1)
3109 d.addCallback(self._count_leases, "mutable")
3110 d.addCallback(self._assert_leasecount, 1)
# same-client add-lease on the mutable file: renewal only, no new lease
3112 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true")
3113 d.addCallback(_got_html_good)
3115 d.addCallback(self._count_leases, "one")
3116 d.addCallback(self._assert_leasecount, 2)
3117 d.addCallback(self._count_leases, "two")
3118 d.addCallback(self._assert_leasecount, 1)
3119 d.addCallback(self._count_leases, "mutable")
3120 d.addCallback(self._assert_leasecount, 1)
# alternate-client add-lease on the mutable file (clientnum kwarg is on
# a line missing from this listing) -> second lease on "mutable"
3122 d.addCallback(self.CHECK, "mutable", "t=check&add-lease=true",
3124 d.addCallback(_got_html_good)
3126 d.addCallback(self._count_leases, "one")
3127 d.addCallback(self._assert_leasecount, 2)
3128 d.addCallback(self._count_leases, "two")
3129 d.addCallback(self._assert_leasecount, 1)
3130 d.addCallback(self._count_leases, "mutable")
3131 d.addCallback(self._assert_leasecount, 2)
3133 d.addErrback(self.explain_web_error)
# Exercise "t=stream-deep-check&add-lease=true" over a small tree
# (root, one, small, mutable): without add-lease nothing changes; with
# add-lease from the same client only renewal happens; with add-lease
# from the second client every node gains a second lease. (Listing has
# gaps in its embedded numbering; surviving lines annotated as-is.)
3136 def test_deep_add_lease(self):
3137 self.basedir = "web/Grid/deep_add_lease"
3138 self.set_up_grid(num_clients=2)
3139 c0 = self.g.clients[0]
3143 d = c0.create_empty_dirnode()
3144 def _stash_root_and_create_file(n):
3146 self.uris["root"] = n.get_uri()
3147 self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
3148 return n.add_file(u"one", upload.Data(DATA, convergence=""))
3149 d.addCallback(_stash_root_and_create_file)
3150 def _stash_uri(fn, which):
3151 self.uris[which] = fn.get_uri()
3152 d.addCallback(_stash_uri, "one")
# "small" is short enough to become a literal (LIT) file
3153 d.addCallback(lambda ign:
3154 self.rootnode.add_file(u"small",
3155 upload.Data("literal",
3157 d.addCallback(_stash_uri, "small")
3159 d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
3160 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
3161 d.addCallback(_stash_uri, "mutable")
3163 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
# one JSON unit per streamed line; the `def _done(res):` header and the
# line filter are on lines missing from this listing
3165 units = [simplejson.loads(line)
3166 for line in res.splitlines()
3168 # root, one, small, mutable, stats
3169 self.failUnlessEqual(len(units), 4+1)
3170 d.addCallback(_done)
# deep-check without add-lease must leave all lease counts at 1
3172 d.addCallback(self._count_leases, "root")
3173 d.addCallback(self._assert_leasecount, 1)
3174 d.addCallback(self._count_leases, "one")
3175 d.addCallback(self._assert_leasecount, 1)
3176 d.addCallback(self._count_leases, "mutable")
3177 d.addCallback(self._assert_leasecount, 1)
# same client: add-lease only renews the existing lease
3179 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true")
3180 d.addCallback(_done)
3182 d.addCallback(self._count_leases, "root")
3183 d.addCallback(self._assert_leasecount, 1)
3184 d.addCallback(self._count_leases, "one")
3185 d.addCallback(self._assert_leasecount, 1)
3186 d.addCallback(self._count_leases, "mutable")
3187 d.addCallback(self._assert_leasecount, 1)
# alternate client (clientnum kwarg on a missing line): every node in
# the tree gains a second lease
3189 d.addCallback(self.CHECK, "root", "t=stream-deep-check&add-lease=true",
3191 d.addCallback(_done)
3193 d.addCallback(self._count_leases, "root")
3194 d.addCallback(self._assert_leasecount, 2)
3195 d.addCallback(self._count_leases, "one")
3196 d.addCallback(self._assert_leasecount, 2)
3197 d.addCallback(self._count_leases, "mutable")
3198 d.addCallback(self._assert_leasecount, 2)
3200 d.addErrback(self.explain_web_error)