from twisted.internet import threads # CLI tests use deferToThread
from allmydata.immutable import upload
from allmydata.mutable.common import UnrecoverableFileError
+from allmydata.mutable.publish import MutableData
from allmydata.util import idlib
from allmydata.util import base32
from allmydata.scripts import runner
from allmydata.test.common_util import StallMixin
from allmydata.test.no_network import GridTestMixin
-timeout = 960 # Most of these take longer than 340 seconds on Zand's ARM box.
+timeout = 2400 # One of these took 1046.091s on Zandr's ARM box.
class MutableChecker(GridTestMixin, unittest.TestCase, ErrorMixin):
def _run_cli(self, argv):
self.basedir = "deepcheck/MutableChecker/good"
self.set_up_grid()
CONTENTS = "a little bit of data"
- d = self.g.clients[0].create_mutable_file(CONTENTS)
+ CONTENTS_uploadable = MutableData(CONTENTS)
+ d = self.g.clients[0].create_mutable_file(CONTENTS_uploadable)
def _created(node):
self.node = node
self.fileurl = "uri/" + urllib.quote(node.get_uri())
- si = self.node.get_storage_index()
d.addCallback(_created)
# now make sure the webapi verifier sees no problems
d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=true",
self.basedir = "deepcheck/MutableChecker/corrupt"
self.set_up_grid()
CONTENTS = "a little bit of data"
- d = self.g.clients[0].create_mutable_file(CONTENTS)
+ CONTENTS_uploadable = MutableData(CONTENTS)
+ d = self.g.clients[0].create_mutable_file(CONTENTS_uploadable)
def _stash_and_corrupt(node):
self.node = node
self.fileurl = "uri/" + urllib.quote(node.get_uri())
self.basedir = "deepcheck/MutableChecker/delete_share"
self.set_up_grid()
CONTENTS = "a little bit of data"
- d = self.g.clients[0].create_mutable_file(CONTENTS)
+ CONTENTS_uploadable = MutableData(CONTENTS)
+ d = self.g.clients[0].create_mutable_file(CONTENTS_uploadable)
def _stash_and_delete(node):
self.node = node
self.fileurl = "uri/" + urllib.quote(node.get_uri())
if not unit:
# stream should end with a newline, so split returns ""
continue
- yield simplejson.loads(unit)
+ try:
+ yield simplejson.loads(unit)
+ except ValueError, le:
+ le.args = tuple(le.args + (unit,))
+ raise
def web(self, n, method="GET", **kwargs):
# returns (data, url)
class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
# construct a small directory tree (with one dir, one immutable file, one
- # mutable file, one LIT file, and a loop), and then check/examine it in
- # various ways.
+ # mutable file, two LIT files, one DIR2:LIT empty dir, one DIR2:LIT tiny
+ # dir, and a loop), and then check/examine it in various ways.
def set_up_tree(self):
# 2.9s
- # root
- # mutable
- # large
- # small
- # small2
- # loop -> root
c0 = self.g.clients[0]
- d = c0.create_empty_dirnode()
+ d = c0.create_dirnode()
def _created_root(n):
self.root = n
self.root_uri = n.get_uri()
d.addCallback(_created_root)
- d.addCallback(lambda ign: c0.create_mutable_file("mutable file contents"))
+ d.addCallback(lambda ign:
+ c0.create_mutable_file(MutableData("mutable file contents")))
d.addCallback(lambda n: self.root.set_node(u"mutable", n))
def _created_mutable(n):
self.mutable = n
self.small2_uri = n.get_uri()
d.addCallback(_created_small2)
+ empty_litdir_uri = "URI:DIR2-LIT:"
+ tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
+
+ d.addCallback(lambda ign: self.root._create_and_validate_node(None, empty_litdir_uri, name=u"test_deepcheck empty_lit_dir"))
+ def _created_empty_lit_dir(n):
+ self.empty_lit_dir = n
+ self.empty_lit_dir_uri = n.get_uri()
+ self.root.set_node(u"empty_lit_dir", n)
+ d.addCallback(_created_empty_lit_dir)
+
+ d.addCallback(lambda ign: self.root._create_and_validate_node(None, tiny_litdir_uri, name=u"test_deepcheck tiny_lit_dir"))
+ def _created_tiny_lit_dir(n):
+ self.tiny_lit_dir = n
+ self.tiny_lit_dir_uri = n.get_uri()
+ self.root.set_node(u"tiny_lit_dir", n)
+ d.addCallback(_created_tiny_lit_dir)
+
d.addCallback(lambda ign: self.root.set_node(u"loop", self.root))
return d
needs_rebalancing = bool( num_servers < 10 )
if not incomplete:
self.failUnlessEqual(cr.needs_rebalancing(), needs_rebalancing,
- str((where, cr, cr.get_data())))
- d = cr.get_data()
- self.failUnlessEqual(d["count-shares-good"], 10, where)
- self.failUnlessEqual(d["count-shares-needed"], 3, where)
- self.failUnlessEqual(d["count-shares-expected"], 10, where)
+ str((where, cr, cr.as_dict())))
+ self.failUnlessEqual(cr.get_share_counter_good(), 10, where)
+ self.failUnlessEqual(cr.get_encoding_needed(), 3, where)
+ self.failUnlessEqual(cr.get_encoding_expected(), 10, where)
if not incomplete:
- self.failUnlessEqual(d["count-good-share-hosts"], num_servers,
- where)
- self.failUnlessEqual(d["count-corrupt-shares"], 0, where)
- self.failUnlessEqual(d["list-corrupt-shares"], [], where)
+ self.failUnlessEqual(cr.get_host_counter_good_shares(),
+ num_servers, where)
+ self.failUnlessEqual(cr.get_corrupt_shares(), [], where)
if not incomplete:
- self.failUnlessEqual(sorted(d["servers-responding"]),
- sorted(self.g.servers_by_id.keys()),
+ self.failUnlessEqual(sorted([s.get_serverid()
+ for s in cr.get_servers_responding()]),
+ sorted(self.g.get_all_serverids()),
where)
- self.failUnless("sharemap" in d, str((where, d)))
all_serverids = set()
- for (shareid, serverids) in d["sharemap"].items():
- all_serverids.update(serverids)
+ for (shareid, servers) in cr.get_sharemap().items():
+ all_serverids.update([s.get_serverid() for s in servers])
self.failUnlessEqual(sorted(all_serverids),
- sorted(self.g.servers_by_id.keys()),
+ sorted(self.g.get_all_serverids()),
where)
- self.failUnlessEqual(d["count-wrong-shares"], 0, where)
- self.failUnlessEqual(d["count-recoverable-versions"], 1, where)
- self.failUnlessEqual(d["count-unrecoverable-versions"], 0, where)
+ self.failUnlessEqual(cr.get_share_counter_wrong(), 0, where)
+ self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
+ self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
def check_and_repair_is_healthy(self, cr, n, where, incomplete=False):
return d
def check_stats_good(self, s):
- self.failUnlessEqual(s["count-directories"], 1)
- self.failUnlessEqual(s["count-files"], 4)
+ self.failUnlessEqual(s["count-directories"], 3)
+ self.failUnlessEqual(s["count-files"], 5)
self.failUnlessEqual(s["count-immutable-files"], 1)
- self.failUnlessEqual(s["count-literal-files"], 2)
+ self.failUnlessEqual(s["count-literal-files"], 3)
self.failUnlessEqual(s["count-mutable-files"], 1)
# don't check directories: their size will vary
# s["largest-directory"]
# s["size-directories"]
- self.failUnlessEqual(s["largest-directory-children"], 5)
+ self.failUnlessEqual(s["largest-directory-children"], 7)
self.failUnlessEqual(s["largest-immutable-file"], 13000)
# to re-use this function for both the local
# dirnode.start_deep_stats() and the webapi t=start-deep-stats, we
# returns a list of tuples, but JSON only knows about lists., so
# t=start-deep-stats returns a list of lists.
histogram = [tuple(stuff) for stuff in s["size-files-histogram"]]
- self.failUnlessEqual(histogram, [(11, 31, 2),
+ self.failUnlessEqual(histogram, [(4, 10, 1), (11, 31, 2),
(10001, 31622, 1),
])
self.failUnlessEqual(s["size-immutable-files"], 13000)
- self.failUnlessEqual(s["size-literal-files"], 48)
+ self.failUnlessEqual(s["size-literal-files"], 56)
def do_web_stream_manifest(self, ignored):
d = self.web(self.root, method="POST", t="stream-manifest")
files = [u for u in units if u["type"] in ("file", "directory")]
assert units[-1]["type"] == "stats"
stats = units[-1]["stats"]
- self.failUnlessEqual(len(files), 5)
- # [root,mutable,large] are distributed, [small,small2] are not
+ self.failUnlessEqual(len(files), 8)
+ # [root,mutable,large] are distributed; [small,small2,empty_lit_dir,tiny_lit_dir] and tiny_lit_dir's LIT child are not (3 + 5 = 8)
self.failUnlessEqual(len([f for f in files
- if f["verifycap"] is not None]), 3)
+ if f["verifycap"] != ""]), 3)
self.failUnlessEqual(len([f for f in files
- if f["verifycap"] is None]), 2)
+ if f["verifycap"] == ""]), 5)
self.failUnlessEqual(len([f for f in files
- if f["repaircap"] is not None]), 3)
+ if f["repaircap"] != ""]), 3)
self.failUnlessEqual(len([f for f in files
- if f["repaircap"] is None]), 2)
+ if f["repaircap"] == ""]), 5)
self.failUnlessEqual(len([f for f in files
- if f["storage-index"] is not None]), 3)
+ if f["storage-index"] != ""]), 3)
self.failUnlessEqual(len([f for f in files
- if f["storage-index"] is None]), 2)
+ if f["storage-index"] == ""]), 5)
# make sure that a mutable file has filecap==repaircap!=verifycap
mutable = [f for f in files
if f["cap"] is not None
self.check_stats_good(stats)
def do_web_stream_check(self, ignored):
+ # TODO
return
d = self.web(self.root, t="stream-deep-check")
def _check(res):
units = list(self.parse_streamed_json(res))
- files = [u for u in units if u["type"] in ("file", "directory")]
+ #files = [u for u in units if u["type"] in ("file", "directory")]
assert units[-1]["type"] == "stats"
- stats = units[-1]["stats"]
+ #stats = units[-1]["stats"]
# ...
d.addCallback(_check)
return d
d.addCallback(self.failUnlessEqual, None, "small")
d.addCallback(lambda ign: self.small2.check(Monitor()))
d.addCallback(self.failUnlessEqual, None, "small2")
+ d.addCallback(lambda ign: self.empty_lit_dir.check(Monitor()))
+ d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
+ d.addCallback(lambda ign: self.tiny_lit_dir.check(Monitor()))
+ d.addCallback(self.failUnlessEqual, None, "tiny_lit_dir")
# and again with verify=True
d.addCallback(lambda ign: self.root.check(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "small")
d.addCallback(lambda ign: self.small2.check(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "small2")
+ d.addCallback(lambda ign: self.empty_lit_dir.check(Monitor(), verify=True))
+ d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
+ d.addCallback(lambda ign: self.tiny_lit_dir.check(Monitor(), verify=True))
+ d.addCallback(self.failUnlessEqual, None, "tiny_lit_dir")
# and check_and_repair(), which should be a nop
d.addCallback(lambda ign: self.root.check_and_repair(Monitor()))
d.addCallback(self.check_and_repair_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.mutable.check_and_repair(Monitor()))
d.addCallback(self.check_and_repair_is_healthy, self.mutable, "mutable")
- #TODO d.addCallback(lambda ign: self.large.check_and_repair(Monitor()))
- #TODO d.addCallback(self.check_and_repair_is_healthy, self.large, "large")
- #TODO d.addCallback(lambda ign: self.small.check_and_repair(Monitor()))
- #TODO d.addCallback(self.failUnlessEqual, None, "small")
- #TODO d.addCallback(lambda ign: self.small2.check_and_repair(Monitor()))
- #TODO d.addCallback(self.failUnlessEqual, None, "small2")
+ d.addCallback(lambda ign: self.large.check_and_repair(Monitor()))
+ d.addCallback(self.check_and_repair_is_healthy, self.large, "large")
+ d.addCallback(lambda ign: self.small.check_and_repair(Monitor()))
+ d.addCallback(self.failUnlessEqual, None, "small")
+ d.addCallback(lambda ign: self.small2.check_and_repair(Monitor()))
+ d.addCallback(self.failUnlessEqual, None, "small2")
+ d.addCallback(lambda ign: self.empty_lit_dir.check_and_repair(Monitor()))
+ d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
+ d.addCallback(lambda ign: self.tiny_lit_dir.check_and_repair(Monitor()))
# check_and_repair(verify=True)
d.addCallback(lambda ign: self.root.check_and_repair(Monitor(), verify=True))
d.addCallback(self.check_and_repair_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.mutable.check_and_repair(Monitor(), verify=True))
d.addCallback(self.check_and_repair_is_healthy, self.mutable, "mutable")
- #TODO d.addCallback(lambda ign: self.large.check_and_repair(Monitor(), verify=True))
- #TODO d.addCallback(self.check_and_repair_is_healthy, self.large, "large",
- #TODO incomplete=True)
- #TODO d.addCallback(lambda ign: self.small.check_and_repair(Monitor(), verify=True))
- #TODO d.addCallback(self.failUnlessEqual, None, "small")
- #TODO d.addCallback(lambda ign: self.small2.check_and_repair(Monitor(), verify=True))
- #TODO d.addCallback(self.failUnlessEqual, None, "small2")
+ d.addCallback(lambda ign: self.large.check_and_repair(Monitor(), verify=True))
+ d.addCallback(self.check_and_repair_is_healthy, self.large, "large", incomplete=True)
+ d.addCallback(lambda ign: self.small.check_and_repair(Monitor(), verify=True))
+ d.addCallback(self.failUnlessEqual, None, "small")
+ d.addCallback(lambda ign: self.small2.check_and_repair(Monitor(), verify=True))
+ d.addCallback(self.failUnlessEqual, None, "small2")
+ d.addCallback(lambda ign: self.empty_lit_dir.check_and_repair(Monitor(), verify=True))
+ d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
+ d.addCallback(lambda ign: self.tiny_lit_dir.check_and_repair(Monitor(), verify=True))
# now deep-check the root, with various verify= and repair= options
if not incomplete:
self.failUnlessEqual(sorted(r["servers-responding"]),
sorted([idlib.nodeid_b2a(sid)
- for sid in self.g.servers_by_id]),
+ for sid in self.g.get_all_serverids()]),
where)
self.failUnless("sharemap" in r, where)
all_serverids = set()
all_serverids.update(serverids_s)
self.failUnlessEqual(sorted(all_serverids),
sorted([idlib.nodeid_b2a(sid)
- for sid in self.g.servers_by_id]),
+ for sid in self.g.get_all_serverids()]),
where)
self.failUnlessEqual(r["count-wrong-shares"], 0, where)
self.failUnlessEqual(r["count-recoverable-versions"], 1, where)
d.addCallback(self.json_check_lit, self.small, "small")
d.addCallback(lambda ign: self.web_json(self.small2, t="check"))
d.addCallback(self.json_check_lit, self.small2, "small2")
+ d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check"))
+ d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir")
+ d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check"))
+ d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir")
# check and verify
d.addCallback(lambda ign:
d.addCallback(lambda ign:
self.web_json(self.small2, t="check", verify="true"))
d.addCallback(self.json_check_lit, self.small2, "small2+v")
+ d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check", verify="true"))
+ d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir+v")
+ d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check", verify="true"))
+ d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir+v")
# check and repair, no verify
d.addCallback(lambda ign:
d.addCallback(lambda ign:
self.web_json(self.small2, t="check", repair="true"))
d.addCallback(self.json_check_lit, self.small2, "small2+r")
+ d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check", repair="true"))
+ d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir+r")
+ d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check", repair="true"))
+ d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir+r")
# check+verify+repair
d.addCallback(lambda ign:
d.addCallback(lambda ign:
self.web_json(self.small2, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_lit, self.small2, "small2+vr")
+ d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check", repair="true", verify="true"))
+ d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir+vr")
+ d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check", repair="true", verify="true"))
+ d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir+vr")
# now run a deep-check, with various verify= and repair= flags
d.addCallback(lambda ign:
d.addCallback(lambda ign: self.web(self.large, t="info"))
d.addCallback(lambda ign: self.web(self.small, t="info"))
d.addCallback(lambda ign: self.web(self.small2, t="info"))
+ d.addCallback(lambda ign: self.web(self.empty_lit_dir, t="info"))
+ d.addCallback(lambda ign: self.web(self.tiny_lit_dir, t="info"))
return d
def do_cli_manifest_stream1(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["manifest",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "manifest",
self.root_uri])
def _check((out,err)):
self.failUnlessEqual(err, "")
lines = [l for l in out.split("\n") if l]
- self.failUnlessEqual(len(lines), 5)
+ self.failUnlessEqual(len(lines), 8)
caps = {}
for l in lines:
try:
self.failUnlessEqual(caps[self.large.get_uri()], "large")
self.failUnlessEqual(caps[self.small.get_uri()], "small")
self.failUnlessEqual(caps[self.small2.get_uri()], "small2")
+ self.failUnlessEqual(caps[self.empty_lit_dir.get_uri()], "empty_lit_dir")
+ self.failUnlessEqual(caps[self.tiny_lit_dir.get_uri()], "tiny_lit_dir")
d.addCallback(_check)
return d
def do_cli_manifest_stream2(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["manifest",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "manifest",
"--raw",
self.root_uri])
def _check((out,err)):
def do_cli_manifest_stream3(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["manifest",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "manifest",
"--storage-index",
self.root_uri])
def _check((out,err)):
def do_cli_manifest_stream4(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["manifest",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "manifest",
"--verify-cap",
self.root_uri])
def _check((out,err)):
def do_cli_manifest_stream5(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["manifest",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "manifest",
"--repair-cap",
self.root_uri])
def _check((out,err)):
def do_cli_stats1(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["stats",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "stats",
self.root_uri])
def _check3((out,err)):
lines = [l.strip() for l in out.split("\n") if l]
self.failUnless("count-immutable-files: 1" in lines)
self.failUnless("count-mutable-files: 1" in lines)
- self.failUnless("count-literal-files: 2" in lines)
- self.failUnless("count-files: 4" in lines)
- self.failUnless("count-directories: 1" in lines)
+ self.failUnless("count-literal-files: 3" in lines)
+ self.failUnless("count-files: 5" in lines)
+ self.failUnless("count-directories: 3" in lines)
self.failUnless("size-immutable-files: 13000 (13.00 kB, 12.70 kiB)" in lines, lines)
- self.failUnless("size-literal-files: 48" in lines)
- self.failUnless(" 11-31 : 2 (31 B, 31 B)".strip() in lines)
- self.failUnless("10001-31622 : 1 (31.62 kB, 30.88 kiB)".strip() in lines)
+ self.failUnless("size-literal-files: 56" in lines, lines)
+ self.failUnless(" 4-10 : 1 (10 B, 10 B)".strip() in lines, lines)
+ self.failUnless(" 11-31 : 2 (31 B, 31 B)".strip() in lines, lines)
+ self.failUnless("10001-31622 : 1 (31.62 kB, 30.88 kiB)".strip() in lines, lines)
d.addCallback(_check3)
return d
def do_cli_stats2(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["stats",
- "--node-directory", basedir,
+ d = self._run_cli(["--node-directory", basedir,
+ "stats",
"--raw",
self.root_uri])
def _check4((out,err)):
self.failUnlessEqual(data["count-immutable-files"], 1)
self.failUnlessEqual(data["count-immutable-files"], 1)
self.failUnlessEqual(data["count-mutable-files"], 1)
- self.failUnlessEqual(data["count-literal-files"], 2)
- self.failUnlessEqual(data["count-files"], 4)
- self.failUnlessEqual(data["count-directories"], 1)
+ self.failUnlessEqual(data["count-literal-files"], 3)
+ self.failUnlessEqual(data["count-files"], 5)
+ self.failUnlessEqual(data["count-directories"], 3)
self.failUnlessEqual(data["size-immutable-files"], 13000)
- self.failUnlessEqual(data["size-literal-files"], 48)
+ self.failUnlessEqual(data["size-literal-files"], 56)
+ self.failUnless([4,10,1] in data["size-files-histogram"])
self.failUnless([11,31,2] in data["size-files-histogram"])
self.failUnless([10001,31622,1] in data["size-files-histogram"])
d.addCallback(_check4)
self.nodes = {}
c0 = self.g.clients[0]
- d = c0.create_empty_dirnode()
+ d = c0.create_dirnode()
def _created_root(n):
self.root = n
self.root_uri = n.get_uri()
d.addCallback(self.create_mangled, "large-missing-shares")
d.addCallback(self.create_mangled, "large-corrupt-shares")
d.addCallback(self.create_mangled, "large-unrecoverable")
- d.addCallback(lambda ignored: c0.create_empty_dirnode())
+ d.addCallback(lambda ignored: c0.create_dirnode())
d.addCallback(self._stash_node, "broken")
large1 = upload.Data("Lots of data\n" * 1000 + "large1" + "\n", None)
d.addCallback(lambda ignored:
self.nodes["broken"].add_file(u"large1", large1))
d.addCallback(lambda ignored:
- self.nodes["broken"].create_empty_directory(u"subdir-good"))
+ self.nodes["broken"].create_subdirectory(u"subdir-good"))
large2 = upload.Data("Lots of data\n" * 1000 + "large2" + "\n", None)
d.addCallback(lambda subdir: subdir.add_file(u"large2-good", large2))
d.addCallback(lambda ignored:
- self.nodes["broken"].create_empty_directory(u"subdir-unrecoverable"))
+ self.nodes["broken"].create_subdirectory(u"subdir-unrecoverable"))
d.addCallback(self._stash_node, "subdir-unrecoverable")
large3 = upload.Data("Lots of data\n" * 1000 + "large3" + "\n", None)
d.addCallback(lambda subdir: subdir.add_file(u"large3-good", large3))
def create_mangled(self, ignored, name):
nodetype, mangletype = name.split("-", 1)
if nodetype == "mutable":
- d = self.g.clients[0].create_mutable_file("mutable file contents")
+ mutable_uploadable = MutableData("mutable file contents")
+ d = self.g.clients[0].create_mutable_file(mutable_uploadable)
d.addCallback(lambda n: self.root.set_node(unicode(name), n))
elif nodetype == "large":
large = upload.Data("Lots of data\n" * 1000 + name + "\n", None)
self.delete_shares_numbered(node.get_uri(), [0,1])
def _corrupt_some_shares(self, node):
- for (shnum, serverid, sharefile) in self.find_shares(node.get_uri()):
+ for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
if shnum in (0,1):
self._run_cli(["debug", "corrupt-share", sharefile])
self.failUnless(ICheckResults.providedBy(cr), (cr, type(cr), where))
self.failUnless(cr.is_healthy(), (cr.get_report(), cr.is_healthy(), cr.get_summary(), where))
self.failUnless(cr.is_recoverable(), where)
- d = cr.get_data()
- self.failUnlessEqual(d["count-recoverable-versions"], 1, where)
- self.failUnlessEqual(d["count-unrecoverable-versions"], 0, where)
+ self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
+ self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
return cr
except Exception, le:
le.args = tuple(le.args + (where,))
self.failUnless(ICheckResults.providedBy(cr), where)
self.failIf(cr.is_healthy(), where)
self.failUnless(cr.is_recoverable(), where)
- d = cr.get_data()
- self.failUnlessEqual(d["count-recoverable-versions"], 1, where)
- self.failUnlessEqual(d["count-unrecoverable-versions"], 0, where)
+ self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
+ self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
return cr
def check_has_corrupt_shares(self, cr, where):
# by "corrupt-shares" we mean the file is still recoverable
self.failUnless(ICheckResults.providedBy(cr), where)
- d = cr.get_data()
self.failIf(cr.is_healthy(), (where, cr))
self.failUnless(cr.is_recoverable(), where)
- d = cr.get_data()
- self.failUnless(d["count-shares-good"] < 10, where)
- self.failUnless(d["count-corrupt-shares"], where)
- self.failUnless(d["list-corrupt-shares"], where)
+ self.failUnless(cr.get_share_counter_good() < 10, where)
+ self.failUnless(cr.get_corrupt_shares(), where)
return cr
def check_is_unrecoverable(self, cr, where):
self.failUnless(ICheckResults.providedBy(cr), where)
- d = cr.get_data()
self.failIf(cr.is_healthy(), where)
self.failIf(cr.is_recoverable(), where)
- self.failUnless(d["count-shares-good"] < d["count-shares-needed"], (d["count-shares-good"], d["count-shares-needed"], where))
- self.failUnlessEqual(d["count-recoverable-versions"], 0, where)
- self.failUnlessEqual(d["count-unrecoverable-versions"], 1, where)
+ self.failUnless(cr.get_share_counter_good() < cr.get_encoding_needed(),
+ (cr.get_share_counter_good(), cr.get_encoding_needed(),
+ where))
+ self.failUnlessEqual(cr.get_version_counter_recoverable(), 0, where)
+ self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 1, where)
return cr
def do_check(self, ignored):
COUNT = 400
c0 = self.g.clients[0]
- d = c0.create_empty_dirnode()
+ d = c0.create_dirnode()
self.stash = {}
def _created_root(n):
self.root = n
return n
d.addCallback(_created_root)
- d.addCallback(lambda root: root.create_empty_directory(u"subdir"))
+ d.addCallback(lambda root: root.create_subdirectory(u"subdir"))
def _add_children(subdir_node):
self.subdir_node = subdir_node
- kids = []
+ kids = {}
for i in range(1, COUNT):
- litnode = LiteralFileURI("%03d-data" % i)
- kids.append( (u"%03d-small" % i, litnode) )
+ litcap = LiteralFileURI("%03d-data" % i).to_string()
+ kids[u"%03d-small" % i] = (litcap, litcap)
return subdir_node.set_children(kids)
d.addCallback(_add_children)
up = upload.Data("large enough for CHK" * 100, "")