class FakeUploader(service.Service):
name = "uploader"
- def upload(self, uploadable, history=None):
+ def upload(self, uploadable):
d = uploadable.get_size()
d.addCallback(lambda size: uploadable.read(size))
def _got_data(datav):
self.history = FakeHistory()
self.uploader = FakeUploader()
self.uploader.setServiceParent(self)
+ self.blacklist = None
self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
self.uploader, None,
None, None)
return d
def test_GET_FILE_URI_mdmf_extensions(self):
- base = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
- d = self.GET(base)
- d.addCallback(self.failUnlessIsQuuxDotTxt)
- return d
-
- def test_GET_FILE_URI_mdmf_bare_cap(self):
- cap_elements = self._quux_txt_uri.split(":")
- # 6 == expected cap length with two extensions.
- self.failUnlessEqual(len(cap_elements), 6)
-
- # Now lop off the extension parameters and stitch everything
- # back together
- quux_uri = ":".join(cap_elements[:len(cap_elements) - 2])
-
- # Now GET that. We should get back quux.
- base = "/uri/%s" % urllib.quote(quux_uri)
+ base = "/uri/%s" % urllib.quote("%s:RANDOMSTUFF" % self._quux_txt_uri)
d = self.GET(base)
d.addCallback(self.failUnlessIsQuuxDotTxt)
return d
return d
def test_PUT_FILE_URI_mdmf_extensions(self):
- base = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
+ base = "/uri/%s" % urllib.quote("%s:EXTENSIONSTUFF" % self._quux_txt_uri)
self._quux_new_contents = "new_contents"
d = self.GET(base)
d.addCallback(lambda res: self.failUnlessIsQuuxDotTxt(res))
res))
return d
- def test_PUT_FILE_URI_mdmf_bare_cap(self):
- elements = self._quux_txt_uri.split(":")
- self.failUnlessEqual(len(elements), 6)
-
- quux_uri = ":".join(elements[:len(elements) - 2])
- base = "/uri/%s" % urllib.quote(quux_uri)
- self._quux_new_contents = "new_contents" * 50000
-
- d = self.GET(base)
- d.addCallback(self.failUnlessIsQuuxDotTxt)
- d.addCallback(lambda ignored: self.PUT(base, self._quux_new_contents))
- d.addCallback(lambda ignored: self.GET(base))
- d.addCallback(lambda res:
- self.failUnlessEqual(res, self._quux_new_contents))
- return d
-
def test_PUT_FILE_URI_mdmf_readonly(self):
# We're not allowed to PUT things to a readonly cap.
base = "/uri/%s" % self._quux_txt_readonly_uri
return d
def test_GET_FILEURL_info_mdmf_extensions(self):
- d = self.GET("/uri/%s:3:131073?t=info" % self._quux_txt_uri)
+ d = self.GET("/uri/%s:STUFF?t=info" % self._quux_txt_uri)
def _got(res):
self.failUnlessIn("mutable file (mdmf)", res)
self.failUnlessIn(self._quux_txt_uri, res)
d.addCallback(_got)
return d
- def test_GET_FILEURL_info_mdmf_bare_cap(self):
- elements = self._quux_txt_uri.split(":")
- self.failUnlessEqual(len(elements), 6)
-
- quux_uri = ":".join(elements[:len(elements) - 2])
- base = "/uri/%s?t=info" % urllib.quote(quux_uri)
- d = self.GET(base)
- def _got(res):
- self.failUnlessIn("mutable file (mdmf)", res)
- self.failUnlessIn(quux_uri, res)
- d.addCallback(_got)
- return d
-
def test_PUT_overwrite_only_files(self):
# create a directory, put a file in that directory.
contents, n, filecap = self.makefile(8)
d.addCallback(_got_json, "sdmf")
return d
- def test_GET_FILEURL_json_mdmf_extensions(self):
- # A GET invoked against a URL that includes an MDMF cap with
- # extensions should fetch the same JSON information as a GET
- # invoked against a bare cap.
- self._quux_txt_uri = "%s:3:131073" % self._quux_txt_uri
- self._quux_txt_readonly_uri = "%s:3:131073" % self._quux_txt_readonly_uri
- d = self.GET("/uri/%s?t=json" % urllib.quote(self._quux_txt_uri))
- d.addCallback(self.failUnlessIsQuuxJSON)
- return d
-
- def test_GET_FILEURL_json_mdmf_bare_cap(self):
- elements = self._quux_txt_uri.split(":")
- self.failUnlessEqual(len(elements), 6)
-
- quux_uri = ":".join(elements[:len(elements) - 2])
- # so failUnlessIsQuuxJSON will work.
- self._quux_txt_uri = quux_uri
-
- # we need to alter the readonly URI in the same way, again so
- # failUnlessIsQuuxJSON will work
- elements = self._quux_txt_readonly_uri.split(":")
- self.failUnlessEqual(len(elements), 6)
- quux_ro_uri = ":".join(elements[:len(elements) - 2])
- self._quux_txt_readonly_uri = quux_ro_uri
-
- base = "/uri/%s?t=json" % urllib.quote(quux_uri)
- d = self.GET(base)
- d.addCallback(self.failUnlessIsQuuxJSON)
- return d
-
- def test_GET_FILEURL_json_mdmf_bare_readonly_cap(self):
- elements = self._quux_txt_readonly_uri.split(":")
- self.failUnlessEqual(len(elements), 6)
-
- quux_readonly_uri = ":".join(elements[:len(elements) - 2])
- # so failUnlessIsQuuxJSON will work
- self._quux_txt_readonly_uri = quux_readonly_uri
- base = "/uri/%s?t=json" % quux_readonly_uri
- d = self.GET(base)
- # XXX: We may need to make a method that knows how to check for
- # readonly JSON, or else alter that one so that it knows how to
- # do that.
- d.addCallback(self.failUnlessIsQuuxJSON, readonly=True)
- return d
-
def test_GET_FILEURL_json_mdmf(self):
d = self.GET("/uri/%s?t=json" % urllib.quote(self._quux_txt_uri))
d.addCallback(self.failUnlessIsQuuxJSON)
for shnum, serverid, fn in shares:
sf = get_share_file(fn)
num_leases = len(list(sf.get_leases()))
- lease_counts.append( (fn, num_leases) )
+ lease_counts.append( (fn, num_leases) )
return lease_counts
def _assert_leasecount(self, lease_counts, expected):
return d
+ def test_blacklist(self):
+ # download from a blacklisted URI, get an error
+ self.basedir = "web/Grid/blacklist"
+ self.set_up_grid()
+ c0 = self.g.clients[0]
+ c0_basedir = c0.basedir
+ fn = os.path.join(c0_basedir, "access.blacklist")
+ self.uris = {}
+ DATA = "off-limits " * 50
+
+ d = c0.upload(upload.Data(DATA, convergence=""))
+ def _stash_uri_and_create_dir(ur):
+ self.uri = ur.uri
+ self.url = "uri/"+self.uri
+ u = uri.from_string_filenode(self.uri)
+ self.si = u.get_storage_index()
+ childnode = c0.create_node_from_uri(self.uri, None)
+ return c0.create_dirnode({u"blacklisted.txt": (childnode,{}) })
+ d.addCallback(_stash_uri_and_create_dir)
+ def _stash_dir(node):
+ self.dir_node = node
+ self.dir_uri = node.get_uri()
+ self.dir_url = "uri/"+self.dir_uri
+ d.addCallback(_stash_dir)
+ d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
+ def _check_dir_html(body):
+ self.failUnlessIn("<html>", body)
+ self.failUnlessIn("blacklisted.txt</a>", body)
+ d.addCallback(_check_dir_html)
+ d.addCallback(lambda ign: self.GET(self.url))
+ d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
+
+ def _blacklist(ign):
+ f = open(fn, "w")
+ f.write(" # this is a comment\n")
+ f.write(" \n")
+ f.write("\n") # also exercise blank lines
+ f.write("%s %s\n" % (base32.b2a(self.si), "off-limits to you"))
+ f.close()
+ # clients should be checking the blacklist each time, so we don't
+ # need to restart the client
+ d.addCallback(_blacklist)
+ d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_uri",
+ 403, "Forbidden",
+ "Access Prohibited: off-limits",
+ self.GET, self.url))
+
+ # We should still be able to list the parent directory, in HTML...
+ d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
+ def _check_dir_html2(body):
+ self.failUnlessIn("<html>", body)
+ self.failUnlessIn("blacklisted.txt</strike>", body)
+ d.addCallback(_check_dir_html2)
+
+ # ... and in JSON (used by CLI).
+ d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True))
+ def _check_dir_json(res):
+ data = simplejson.loads(res)
+ self.failUnless(isinstance(data, list), data)
+ self.failUnlessEqual(data[0], "dirnode")
+ self.failUnless(isinstance(data[1], dict), data)
+ self.failUnlessIn("children", data[1])
+ self.failUnlessIn("blacklisted.txt", data[1]["children"])
+ childdata = data[1]["children"]["blacklisted.txt"]
+ self.failUnless(isinstance(childdata, list), data)
+ self.failUnlessEqual(childdata[0], "filenode")
+ self.failUnless(isinstance(childdata[1], dict), data)
+ d.addCallback(_check_dir_json)
+
+ def _unblacklist(ign):
+ open(fn, "w").close()
+ # the Blacklist object watches mtime to tell when the file has
+ # changed, but on windows this test will run faster than the
+ # filesystem's mtime resolution. So we edit Blacklist.last_mtime
+ # to force a reload.
+ self.g.clients[0].blacklist.last_mtime -= 2.0
+ d.addCallback(_unblacklist)
+
+ # now a read should work
+ d.addCallback(lambda ign: self.GET(self.url))
+ d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
+
+ # read again to exercise the blacklist-is-unchanged logic
+ d.addCallback(lambda ign: self.GET(self.url))
+ d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
+
+ # now add a blacklisted directory, and make sure files under it are
+ # refused too
+ def _add_dir(ign):
+ childnode = c0.create_node_from_uri(self.uri, None)
+ return c0.create_dirnode({u"child": (childnode,{}) })
+ d.addCallback(_add_dir)
+ def _get_dircap(dn):
+ self.dir_si_b32 = base32.b2a(dn.get_storage_index())
+ self.dir_url_base = "uri/"+dn.get_write_uri()
+ self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json"
+ self.dir_url_json2 = "uri/"+dn.get_write_uri()+"/?t=json"
+ self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"/?t=json"
+ self.child_url = "uri/"+dn.get_readonly_uri()+"/child"
+ d.addCallback(_get_dircap)
+ d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
+ d.addCallback(lambda body: self.failUnlessIn("<html>", body))
+ d.addCallback(lambda ign: self.GET(self.dir_url_json1))
+ d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
+ d.addCallback(lambda ign: self.GET(self.dir_url_json2))
+ d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
+ d.addCallback(lambda ign: self.GET(self.dir_url_json_ro))
+ d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
+ d.addCallback(lambda ign: self.GET(self.child_url))
+ d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
+
+ def _block_dir(ign):
+ f = open(fn, "w")
+ f.write("%s %s\n" % (self.dir_si_b32, "dir-off-limits to you"))
+ f.close()
+ self.g.clients[0].blacklist.last_mtime -= 2.0
+ d.addCallback(_block_dir)
+ d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir base",
+ 403, "Forbidden",
+ "Access Prohibited: dir-off-limits",
+ self.GET, self.dir_url_base))
+ d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json1",
+ 403, "Forbidden",
+ "Access Prohibited: dir-off-limits",
+ self.GET, self.dir_url_json1))
+ d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json2",
+ 403, "Forbidden",
+ "Access Prohibited: dir-off-limits",
+ self.GET, self.dir_url_json2))
+ d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir json_ro",
+ 403, "Forbidden",
+ "Access Prohibited: dir-off-limits",
+ self.GET, self.dir_url_json_ro))
+ d.addCallback(lambda ign: self.shouldHTTPError("get_from_blacklisted_dir child",
+ 403, "Forbidden",
+ "Access Prohibited: dir-off-limits",
+ self.GET, self.child_url))
+ return d
+
+
class CompletelyUnhandledError(Exception):
    """Marker exception with no special handling anywhere, used to
    exercise the web server's generic internal-error path."""
class ErrorBoom(rend.Page):