4 from twisted.trial import unittest
5 from cStringIO import StringIO
10 from allmydata.util import fileutil, hashutil, base32
11 from allmydata import uri
12 from allmydata.immutable import upload
14 # Test that the scripts can be imported -- although the actual tests of their functionality are
15 # done by invoking them in a subprocess.
16 from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp
17 _hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp]
19 from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases
21 from allmydata.scripts import cli, debug, runner, backupdb
22 from allmydata.test.common_util import StallMixin
23 from allmydata.test.no_network import GridTestMixin
24 from twisted.internet import threads # CLI tests use deferToThread
25 from twisted.python import usage
class CLI(unittest.TestCase):
    # this test case only looks at argument-processing and simple stuff.
    # NOTE(review): this chunk appears to have interior lines elided (for
    # example the construction of 'o' before each parseOptions call);
    # hedged notes below mark the visible gaps -- confirm against the
    # full file before editing.

    def test_options(self):
        # The node-url should come from <nodedir>/node.url unless overridden
        # by --node-url; the default alias comes from private/root_dir.cap.
        fileutil.rm_dir("cli/test_options")
        fileutil.make_dirs("cli/test_options")
        fileutil.make_dirs("cli/test_options/private")
        open("cli/test_options/node.url","w").write("http://localhost:8080/\n")
        filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16,
                                               fingerprint="\x00"*32)
        private_uri = uri.NewDirectoryURI(filenode_uri).to_string()
        open("cli/test_options/private/root_dir.cap", "w").write(private_uri + "\n")
        # NOTE(review): 'o' is presumably an options instance created just
        # above -- its construction line is not visible in this chunk.
        o.parseOptions(["--node-directory", "cli/test_options"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")

        # an explicit --node-url should override the node.url file
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://example.org:8111/"])
        self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")

        # NOTE(review): the rest of this argument list (and its closing
        # bracket) is not visible in this chunk.
        o.parseOptions(["--node-directory", "cli/test_options",
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
        self.failUnlessEqual(o.where, "")

        # --dir-cap replaces the default alias with the given directory cap
        other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
                                                     fingerprint="\x11"*32)
        other_uri = uri.NewDirectoryURI(other_filenode_uri).to_string()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "")

        # a trailing positional argument is stored in o.where
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri, "subdir"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "subdir")

        # a malformed --node-url is rejected with a UsageError
        # NOTE(review): the callable argument to failUnlessRaises is not
        # visible in this chunk (presumably o.parseOptions).
        self.failUnlessRaises(usage.UsageError,
                              ["--node-directory", "cli/test_options",
                               "--node-url", "NOT-A-URL"])

        # a --node-url without a trailing slash gets one appended
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://localhost:8080"])
        self.failUnlessEqual(o["node-url"], "http://localhost:8080/")

    def _dump_cap(self, *args):
        # Run 'tahoe debug dump-cap' with the given argv, assert that it
        # wrote nothing to stderr, and capture its stdout.
        config = debug.DumpCapOptions()
        config.stdout,config.stderr = StringIO(), StringIO()
        config.parseOptions(args)
        debug.dump_cap(config)
        self.failIf(config.stderr.getvalue())
        output = config.stdout.getvalue()
        # NOTE(review): a 'return output' line is presumably elided here --
        # callers below use the return value as the captured stdout.

    def test_dump_cap_chk(self):
        # dump-cap on an immutable CHK file cap, its verify cap, and an
        # HTTP-URL-wrapped cap.
        key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
        storage_index = hashutil.storage_index_hash(key)
        uri_extension_hash = hashutil.uri_extension_hash("stuff")
        # NOTE(review): the needed_shares/total_shares/size definitions are
        # not visible in this chunk (asserts below imply 25/100 and 1234).
        u = uri.CHKFileURI(key=key,
                           uri_extension_hash=uri_extension_hash,
                           needed_shares=needed_shares,
                           total_shares=total_shares,
        output = self._dump_cap(u.to_string())
        self.failUnless("CHK File:" in output, output)
        self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)

        # with a client secret, dump-cap can derive the renewal secret
        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output)

        # the verify cap has no key, but retains hash/size/k-N/SI
        output = self._dump_cap(u.get_verify_cap().to_string())
        self.failIf("key: " in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)

        # a cap embedded in a webapi URL should be unwrapped and dumped too
        prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string())
        output = self._dump_cap(prefixed_u)
        self.failUnless("CHK File:" in output, output)
        self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)

    def test_dump_cap_lit(self):
        # a LIT cap embeds its data directly; dump-cap should print it
        u = uri.LiteralFileURI("this is some data")
        output = self._dump_cap(u.to_string())
        self.failUnless("Literal File URI:" in output, output)
        self.failUnless("data: this is some data" in output, output)

    def test_dump_cap_ssk(self):
        # dump-cap on mutable (SSK) write, read-only, and verify caps
        writekey = "\x01" * 16
        fingerprint = "\xfe" * 32
        u = uri.WriteableSSKFileURI(writekey, fingerprint)
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Writeable URI:" in output, output)
        self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
        # NOTE(review): several _dump_cap calls below are missing their
        # trailing arguments/closing parens in this chunk.
        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        # the client secret can also be read from <client-dir>/private/secret
        fileutil.make_dirs("cli/test_dump_cap/private")
        f = open("cli/test_dump_cap/private/secret", "w")
        f.write("5s33nk3qpvnj2fw3z4mnm2y6fa\n")
        output = self._dump_cap("--client-dir", "cli/test_dump_cap",
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        # a bogus client dir means no secret, hence no renewal secret output
        output = self._dump_cap("--client-dir", "cli/test_dump_cap_BOGUS",
        self.failIf("file renewal secret:" in output, output)
        # --nodeid alone yields the write-enabler but not the renewal secret
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failIf("file renewal secret:" in output, output)
        # --nodeid plus --client-secret yields all derived secrets
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
        # NOTE(review): a 'u = u.get_readonly()' (or similar) line is
        # presumably elided before this read-only dump -- confirm.
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Read-only URI:" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

        u = u.get_verify_cap()
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Verifier URI:" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

    def test_dump_cap_directory(self):
        # dump-cap on directory (DIR2) write, read-only, and verify caps
        writekey = "\x01" * 16
        fingerprint = "\xfe" * 32
        u1 = uri.WriteableSSKFileURI(writekey, fingerprint)
        u = uri.NewDirectoryURI(u1)
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Writeable URI:" in output, output)
        self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output,
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output,
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
        # NOTE(review): several _dump_cap calls below are missing trailing
        # arguments/closing parens in this chunk.
        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failIf("file renewal secret:" in output, output)
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
        # NOTE(review): a 'u = u.get_readonly()' (or similar) line is
        # presumably elided before this read-only dump -- confirm.
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Read-only URI:" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

        u = u.get_verify_cap()
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Verifier URI:" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)

    def _catalog_shares(self, *basedirs):
        # Run 'tahoe debug catalog-shares' over the given base directories
        # and capture its stdout/stderr.
        o = debug.CatalogSharesOptions()
        o.stdout,o.stderr = StringIO(), StringIO()
        args = list(basedirs)
        # NOTE(review): an 'o.parseOptions(args)' line is presumably elided
        # here, and a 'return out, err' at the end -- confirm against the
        # full file.
        debug.catalog_shares(o)
        out = o.stdout.getvalue()
        err = o.stderr.getvalue()

    def test_catalog_shares_error(self):
        # catalog-shares must survive (and report) corrupt shares and
        # stray non-directory entries, rather than aborting.
        nodedir1 = "cli/test_catalog_shares/node1"
        sharedir = os.path.join(nodedir1, "storage", "shares", "mq", "mqfblse6m5a6dh45isu2cg7oji")
        fileutil.make_dirs(sharedir)
        f = open(os.path.join(sharedir, "8"), "wb")
        open("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "wb").close()
        # write a bogus share that looks a little bit like CHK
        f.write("\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert
        # NOTE(review): an 'f.close()' line is presumably elided here.

        nodedir2 = "cli/test_catalog_shares/node2"
        fileutil.make_dirs(nodedir2)
        open("cli/test_catalog_shares/node1/storage/shares/not-a-dir", "wb").close()

        # now make sure that the 'catalog-shares' commands survives the error
        out, err = self._catalog_shares(nodedir1, nodedir2)
        self.failUnlessEqual(out, "", out)
        self.failUnless("Error processing " in err,
                        "didn't see 'error processing' in '%s'" % err)
        #self.failUnless(nodedir1 in err,
        #                "didn't see '%s' in '%s'" % (nodedir1, err))
        # windows mangles the path, and os.path.join isn't enough to make
        # up for it, so just look for individual strings
        self.failUnless("node1" in err,
                        "didn't see 'node1' in '%s'" % err)
        self.failUnless("mqfblse6m5a6dh45isu2cg7oji" in err,
                        "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)
class Help(unittest.TestCase):
    # Each test renders a command's usage text via str(...Options()) and
    # checks that the synopsis (and sample invocations) appear.
    # NOTE(review): the "def test_get/test_put/test_rm/test_mv/test_ln"
    # header lines appear elided from this chunk -- the orphaned bodies
    # below presumably each sit inside their own test method.
        help = str(cli.GetOptions())
        self.failUnless("get VDRIVE_FILE LOCAL_FILE" in help, help)
        self.failUnless("% tahoe get FOO |less" in help, help)

        help = str(cli.PutOptions())
        self.failUnless("put LOCAL_FILE VDRIVE_FILE" in help, help)
        self.failUnless("% cat FILE | tahoe put" in help, help)

        help = str(cli.RmOptions())
        self.failUnless("rm VDRIVE_FILE" in help, help)

        help = str(cli.MvOptions())
        self.failUnless("mv FROM TO" in help, help)

        help = str(cli.LnOptions())
        self.failUnless("ln FROM TO" in help, help)

    def test_backup(self):
        help = str(cli.BackupOptions())
        self.failUnless("backup FROM ALIAS:TO" in help, help)

    def test_webopen(self):
        help = str(cli.WebopenOptions())
        self.failUnless("webopen [ALIAS:PATH]" in help, help)

    def test_manifest(self):
        help = str(cli.ManifestOptions())
        self.failUnless("manifest [ALIAS:PATH]" in help, help)

    def test_stats(self):
        help = str(cli.StatsOptions())
        self.failUnless("stats [ALIAS:PATH]" in help, help)

    def test_check(self):
        help = str(cli.CheckOptions())
        self.failUnless("check [ALIAS:PATH]" in help, help)

    def test_deep_check(self):
        help = str(cli.DeepCheckOptions())
        self.failUnless("deep-check [ALIAS:PATH]" in help, help)

    def test_create_alias(self):
        help = str(cli.CreateAliasOptions())
        self.failUnless("create-alias ALIAS" in help, help)

    def test_add_aliases(self):
        help = str(cli.AddAliasOptions())
        self.failUnless("add-alias ALIAS DIRCAP" in help, help)
def do_cli(self, verb, *args, **kwargs):
    # Run one CLI command (e.g. "put", "get") against this test's client
    # node, in a thread so the blocking runner doesn't stall the reactor.
    # Returns (via Deferred) a (rc, stdout, stderr) tuple.
    # NOTE(review): the "nodeargs = [" line introducing this list appears
    # elided from this chunk.
        "--node-directory", self.get_clientdir(),
    argv = [verb] + nodeargs + list(args)
    stdin = kwargs.get("stdin", "")
    stdout, stderr = StringIO(), StringIO()
    d = threads.deferToThread(runner.runner, argv, run_by_human=False,
                              stdin=StringIO(stdin),
                              stdout=stdout, stderr=stderr)
    # NOTE(review): the callback that binds 'rc' (the runner's exit code)
    # and this return's enclosing function are not visible in this chunk.
    return rc, stdout.getvalue(), stderr.getvalue()
class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
    # Tests for 'tahoe create-alias' / 'tahoe add-alias' and the URLs that
    # 'tahoe webopen' derives from aliases.

    def _test_webopen(self, args, expected_url):
        # Run webopen with an open-url collector instead of a real browser
        # and assert exactly one URL, equal to expected_url, was produced.
        woo = cli.WebopenOptions()
        all_args = ["--node-directory", self.get_clientdir()] + list(args)
        woo.parseOptions(all_args)
        # NOTE(review): a 'urls = []' line appears elided from this chunk.
        rc = cli.webopen(woo, urls.append)
        self.failUnlessEqual(rc, 0)
        self.failUnlessEqual(len(urls), 1)
        self.failUnlessEqual(urls[0], expected_url)

    def test_create(self):
        self.basedir = os.path.dirname(self.mktemp())
        # NOTE(review): a grid-setup call (self.set_up_grid()?) appears
        # elided here, as does the final 'return d'.
        d = self.do_cli("create-alias", "tahoe")
        def _done((rc,stdout,stderr)):
            self.failUnless("Alias 'tahoe' created" in stdout)
            aliases = get_aliases(self.get_clientdir())
            self.failUnless("tahoe" in aliases)
            self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
        d.addCallback(lambda res: self.do_cli("create-alias", "two"))

        def _stash_urls(res):
            # remember the webapi URLs for both aliases, for _test_urls below
            aliases = get_aliases(self.get_clientdir())
            node_url_file = os.path.join(self.get_clientdir(), "node.url")
            nodeurl = open(node_url_file, "r").read().strip()
            uribase = nodeurl + "uri/"
            self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
            self.tahoe_subdir_url = self.tahoe_url + "/subdir"
            self.two_url = uribase + urllib.quote(aliases["two"])
            self.two_uri = aliases["two"]
        d.addCallback(_stash_urls)

        # creating an alias that already exists must fail without
        # clobbering the original cap
        d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
        def _check_create_duplicate((rc,stdout,stderr)):
            self.failIfEqual(rc, 0)
            self.failUnless("Alias 'two' already exists!" in stderr)
            aliases = get_aliases(self.get_clientdir())
            self.failUnlessEqual(aliases["two"], self.two_uri)
        d.addCallback(_check_create_duplicate)

        d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
        def _check_add((rc,stdout,stderr)):
            self.failUnlessEqual(rc, 0)
            self.failUnless("Alias 'added' added" in stdout)
        d.addCallback(_check_add)

        # check add-alias with a duplicate
        d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
        def _check_add_duplicate((rc,stdout,stderr)):
            self.failIfEqual(rc, 0)
            self.failUnless("Alias 'two' already exists!" in stderr)
            aliases = get_aliases(self.get_clientdir())
            self.failUnlessEqual(aliases["two"], self.two_uri)
        d.addCallback(_check_add_duplicate)

        def _test_urls(junk):
            self._test_webopen([], self.tahoe_url)
            self._test_webopen(["/"], self.tahoe_url)
            self._test_webopen(["tahoe:"], self.tahoe_url)
            self._test_webopen(["tahoe:/"], self.tahoe_url)
            self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
            self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
            self._test_webopen(["tahoe:subdir/file"], self.tahoe_subdir_url + '/file')
            # if "file" is indeed a file, then the url produced by webopen in
            # this case is disallowed by the webui. but by design, webopen
            # passes through the mistake from the user to the resultant
            self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
            self._test_webopen(["two:"], self.two_url)
        d.addCallback(_test_urls)
class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
    # Tests for 'tahoe put' in its unlinked, aliased, and mutable forms.
    # NOTE(review): this chunk has interior lines elided (several
    # 'def _callback(res):' headers, f.write/f.close calls, DATA/DATA1
    # definitions, set_up_grid calls, and 'return d' lines); hedged notes
    # below mark the most confusing gaps.

    def test_unlinked_immutable_stdin(self):
        # tahoe get `echo DATA | tahoe put`
        # tahoe get `echo DATA | tahoe put -`
        self.basedir = self.mktemp()
        d = self.do_cli("put", stdin=DATA)
        # NOTE(review): the 'def _uploaded(res):' header for this body
        # appears elided from this chunk.
        (rc, stdout, stderr) = res
        self.failUnless("waiting for file data on stdin.." in stderr)
        self.failUnless("200 OK" in stderr, stderr)
        self.readcap = stdout
        self.failUnless(self.readcap.startswith("URI:CHK:"))
        d.addCallback(_uploaded)
        d.addCallback(lambda res: self.do_cli("get", self.readcap))
        def _downloaded(res):
            (rc, stdout, stderr) = res
            self.failUnlessEqual(stderr, "")
            self.failUnlessEqual(stdout, DATA)
        d.addCallback(_downloaded)
        # an explicit "-" target should behave identically to no target
        d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))

    def test_unlinked_immutable_from_file(self):
        # tahoe put ./file.txt
        # tahoe put /tmp/file.txt
        # tahoe put ~/file.txt
        self.basedir = os.path.dirname(self.mktemp())
        # this will be "allmydata.test.test_cli/Put/test_put_from_file/RANDOM"
        # and the RANDOM directory will exist. Raw mktemp returns a filename.
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        # we make the file small enough to fit in a LIT file, for speed
        f = open(rel_fn, "w")
        f.write("short file")
        d = self.do_cli("put", rel_fn)
        def _uploaded((rc,stdout,stderr)):
            # NOTE(review): the line binding 'readcap' (stdout.strip()?)
            # appears elided from this chunk.
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.readcap = readcap
        d.addCallback(_uploaded)
        # relative-with-./ and absolute paths must give the same cap
        d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        d.addCallback(lambda res: self.do_cli("put", abs_fn))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        # we just have to assume that ~ is handled properly

    def test_immutable_from_file(self):
        # tahoe put file.txt uploaded.txt
        # tahoe - uploaded.txt
        # tahoe put file.txt subdir/uploaded.txt
        # tahoe put file.txt tahoe:uploaded.txt
        # tahoe put file.txt tahoe:subdir/uploaded.txt
        # tahoe put file.txt DIRCAP:./uploaded.txt
        # tahoe put file.txt DIRCAP:./subdir/uploaded.txt
        self.basedir = os.path.dirname(self.mktemp())
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        # we make the file small enough to fit in a LIT file, for speed
        DATA2 = "short file two"
        f = open(rel_fn, "w")
        # NOTE(review): the DATA definition and f.write/f.close lines
        # appear elided from this chunk.

        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "uploaded.txt"))
        def _uploaded((rc,stdout,stderr)):
            readcap = stdout.strip()
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.failUnless("201 Created" in stderr, stderr)
            self.readcap = readcap
        d.addCallback(_uploaded)
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        # overwriting an existing name yields "200 OK" rather than 201
        d.addCallback(lambda res:
                      self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
        def _replaced((rc,stdout,stderr)):
            readcap = stdout.strip()
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.failUnless("200 OK" in stderr, stderr)
        d.addCallback(_replaced)

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
        d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        def _get_dircap(res):
            self.dircap = get_aliases(self.get_clientdir())["tahoe"]
        d.addCallback(_get_dircap)

        # a raw DIRCAP:./path target should work like an alias target
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn,
                                  self.dircap+":./uploaded5.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded5.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn,
                                  self.dircap+":./subdir/uploaded6.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))

    def test_mutable_unlinked(self):
        # FILECAP = `echo DATA | tahoe put --mutable`
        # tahoe get FILECAP, compare against DATA
        # echo DATA2 | tahoe put - FILECAP
        # tahoe get FILECAP, compare against DATA2
        # tahoe put file.txt FILECAP
        self.basedir = os.path.dirname(self.mktemp())
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        DATA3 = "three" * 100
        f = open(rel_fn, "w")
        # NOTE(review): the DATA/DATA2 definitions, f.write/f.close, and
        # the 'def _created/_replaced/_replaced2(res):' headers appear
        # elided from this chunk.

        d = self.do_cli("put", "--mutable", stdin=DATA)
        (rc, stdout, stderr) = res
        self.failUnless("waiting for file data on stdin.." in stderr)
        self.failUnless("200 OK" in stderr)
        self.filecap = stdout
        self.failUnless(self.filecap.startswith("URI:SSK:"))
        d.addCallback(_created)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA))

        # writing to the same filecap should modify the file in place
        d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
        (rc, stdout, stderr) = res
        self.failUnless("waiting for file data on stdin.." in stderr)
        self.failUnless("200 OK" in stderr)
        self.failUnlessEqual(self.filecap, stdout)
        d.addCallback(_replaced)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))

        d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
        (rc, stdout, stderr) = res
        self.failUnless("200 OK" in stderr)
        self.failUnlessEqual(self.filecap, stdout)
        d.addCallback(_replaced2)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA3))

    def test_mutable(self):
        # echo DATA1 | tahoe put --mutable - uploaded.txt
        # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
        # tahoe get uploaded.txt, compare against DATA2
        self.basedir = os.path.dirname(self.mktemp())
        fn1 = os.path.join(self.basedir, "DATA1")
        fn2 = os.path.join(self.basedir, "DATA2")
        # NOTE(review): the DATA1/DATA2 definitions and the writes to
        # fn1/fn2 appear elided from this chunk.
        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda res:
                      self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
        d.addCallback(lambda res:
                      self.do_cli("put", fn2, "tahoe:uploaded.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded.txt"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
    # Tests for 'tahoe cp' argument handling and edge cases.

    def test_not_enough_args(self):
        # NOTE(review): the construction of 'o' (cli.CpOptions()?) appears
        # elided from this chunk.
        self.failUnlessRaises(usage.UsageError,
                              o.parseOptions, ["onearg"])

    def test_unicode_filename(self):
        self.basedir = os.path.dirname(self.mktemp())
        fn1 = os.path.join(self.basedir, "Ärtonwall")
        DATA1 = "unicode file content"
        open(fn1, "wb").write(DATA1)

        fn2 = os.path.join(self.basedir, "Metallica")
        DATA2 = "non-unicode file content"
        open(fn2, "wb").write(DATA2)

        # Assure that uploading a file whose name contains unicode character doesn't
        # prevent further uploads in the same directory
        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
        d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:Ärtonwall"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA1))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
    test_unicode_filename.todo = "This behavior is not yet supported, although it does happen to work (for reasons that are ill-understood) on many platforms. See issue ticket #534."

    def test_dangling_symlink_vs_recursion(self):
        if not hasattr(os, 'symlink'):
            raise unittest.SkipTest("There is no symlink on this platform.")
        # cp -r on a directory containing a dangling symlink shouldn't assert
        self.basedir = os.path.dirname(self.mktemp())
        dn = os.path.join(self.basedir, "dir")
        fn = os.path.join(dn, "Fakebandica")
        ln = os.path.join(dn, "link")
        # NOTE(review): the mkdir/symlink setup lines and the tail of this
        # cp --recursive call appear elided from this chunk.
        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda res: self.do_cli("cp", "--recursive",
701 class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
def writeto(self, path, data):
    # Create (or overwrite) a file at <basedir>/home/<path>, creating any
    # missing parent directories first.
    d = os.path.dirname(os.path.join(self.basedir, "home", path))
    fileutil.make_dirs(d)
    f = open(os.path.join(self.basedir, "home", path), "w")
    # NOTE(review): the f.write(data)/f.close() lines appear elided from
    # this chunk.
def count_output(self, out):
    """Parse the 'tahoe backup' summary line out of *out*.

    Looks for a line like
    '3 files uploaded (0 reused), 4 directories created (0 reused)'
    and returns the four counters as a list of ints:
    [files_uploaded, files_reused, dirs_created, dirs_reused].
    Raises AttributeError (mo is None) if the summary line is absent,
    which surfaces as a test error.
    """
    # BUGFIX: the first group was '(\d)+' -- a *repeated one-character
    # group*, which retains only the last digit of a multi-digit count
    # (e.g. '12 files uploaded' parsed as 2). Use '(\d+)' like the other
    # three groups so the whole number is captured.
    mo = re.search(r"(\d+) files uploaded \((\d+) reused\), (\d+) directories created \((\d+) reused\)", out)
    return [int(s) for s in mo.groups()]
def count_output2(self, out):
    """Parse the 'tahoe backup' check-summary line out of *out*.

    Looks for a line like
    '3 files checked, 4 directories checked, 5 directories read'
    and returns the three counters as a list of ints:
    [files_checked, dirs_checked, dirs_read].
    Raises AttributeError (mo is None) if the line is absent, which
    surfaces as a test error.
    """
    # BUGFIX: the first group was '(\d)+' -- a repeated one-character
    # group that keeps only the last digit of a multi-digit count. Use
    # '(\d+)' like the other two groups so the whole number is captured.
    mo = re.search(r"(\d+) files checked, (\d+) directories checked, (\d+) directories read", out)
    return [int(s) for s in mo.groups()]
718 def test_backup(self):
719 self.basedir = os.path.dirname(self.mktemp())
722 # is the backupdb available? If so, we test that a second backup does
723 # not create new directories.
725 have_bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
728 # create a small local directory with a couple of files
729 source = os.path.join(self.basedir, "home")
730 fileutil.make_dirs(os.path.join(source, "empty"))
731 self.writeto("parent/subdir/foo.txt", "foo")
732 self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
733 self.writeto("parent/blah.txt", "blah")
735 def do_backup(use_backupdb=True, verbose=False):
737 if not have_bdb or not use_backupdb:
738 cmd.append("--no-backupdb")
740 cmd.append("--verbose")
742 cmd.append("tahoe:backups")
743 return self.do_cli(*cmd)
745 d = self.do_cli("create-alias", "tahoe")
748 d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:backups"))
749 def _should_complain((rc, out, err)):
750 self.failUnless("I was unable to import a python sqlite library" in err, err)
751 d.addCallback(_should_complain)
752 d.addCallback(self.stall, 1.1) # make sure the backups get distinct timestamps
754 d.addCallback(lambda res: do_backup())
755 def _check0((rc, out, err)):
756 self.failUnlessEqual(err, "")
757 self.failUnlessEqual(rc, 0)
758 fu, fr, dc, dr = self.count_output(out)
759 # foo.txt, bar.txt, blah.txt
760 self.failUnlessEqual(fu, 3)
761 self.failUnlessEqual(fr, 0)
762 # empty, home, home/parent, home/parent/subdir
763 self.failUnlessEqual(dc, 4)
764 self.failUnlessEqual(dr, 0)
765 d.addCallback(_check0)
767 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups"))
768 def _check1((rc, out, err)):
769 self.failUnlessEqual(err, "")
770 self.failUnlessEqual(rc, 0)
771 self.failUnlessEqual(sorted(out.split()), ["Archives", "Latest"])
772 d.addCallback(_check1)
773 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
774 def _check2((rc, out, err)):
775 self.failUnlessEqual(err, "")
776 self.failUnlessEqual(rc, 0)
777 self.failUnlessEqual(sorted(out.split()), ["empty", "parent"])
778 d.addCallback(_check2)
779 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
780 def _check2a((rc, out, err)):
781 self.failUnlessEqual(err, "")
782 self.failUnlessEqual(rc, 0)
783 self.failUnlessEqual(out.strip(), "")
784 d.addCallback(_check2a)
785 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
786 def _check3((rc, out, err)):
787 self.failUnlessEqual(err, "")
788 self.failUnlessEqual(rc, 0)
789 self.failUnlessEqual(out, "foo")
790 d.addCallback(_check3)
791 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
792 def _check4((rc, out, err)):
793 self.failUnlessEqual(err, "")
794 self.failUnlessEqual(rc, 0)
795 self.old_archives = out.split()
796 self.failUnlessEqual(len(self.old_archives), 1)
797 d.addCallback(_check4)
800 d.addCallback(self.stall, 1.1)
801 d.addCallback(lambda res: do_backup())
802 def _check4a((rc, out, err)):
803 # second backup should reuse everything, if the backupdb is
805 self.failUnlessEqual(err, "")
806 self.failUnlessEqual(rc, 0)
808 fu, fr, dc, dr = self.count_output(out)
809 # foo.txt, bar.txt, blah.txt
810 self.failUnlessEqual(fu, 0)
811 self.failUnlessEqual(fr, 3)
812 # empty, home, home/parent, home/parent/subdir
813 self.failUnlessEqual(dc, 0)
814 self.failUnlessEqual(dr, 4)
815 d.addCallback(_check4a)
818 # sneak into the backupdb, crank back the "last checked"
819 # timestamp to force a check on all files
820 def _reset_last_checked(res):
821 dbfile = os.path.join(self.get_clientdir(),
822 "private", "backupdb.sqlite")
823 self.failUnless(os.path.exists(dbfile), dbfile)
824 bdb = backupdb.get_backupdb(dbfile)
825 bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
826 bdb.connection.commit()
828 d.addCallback(_reset_last_checked)
830 d.addCallback(self.stall, 1.1)
831 d.addCallback(lambda res: do_backup(verbose=True))
832 def _check4b((rc, out, err)):
833 # we should check all files, and re-use all of them. None of
834 # the directories should have been changed.
835 self.failUnlessEqual(err, "")
836 self.failUnlessEqual(rc, 0)
837 fu, fr, dc, dr = self.count_output(out)
838 fchecked, dchecked, dread = self.count_output2(out)
839 self.failUnlessEqual(fchecked, 3)
840 self.failUnlessEqual(fu, 0)
841 self.failUnlessEqual(fr, 3)
842 # TODO: backupdb doesn't do dirs yet; when it does, this will
843 # change to dchecked=4, and maybe dread=0
844 self.failUnlessEqual(dchecked, 0)
845 self.failUnlessEqual(dread, 4)
846 self.failUnlessEqual(dc, 0)
847 self.failUnlessEqual(dr, 4)
848 d.addCallback(_check4b)
850 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
851 def _check5((rc, out, err)):
852 self.failUnlessEqual(err, "")
853 self.failUnlessEqual(rc, 0)
854 self.new_archives = out.split()
858 self.failUnlessEqual(len(self.new_archives), expected_new, out)
859 # the original backup should still be the oldest (i.e. sorts
860 # alphabetically towards the beginning)
861 self.failUnlessEqual(sorted(self.new_archives)[0],
862 self.old_archives[0])
863 d.addCallback(_check5)
865 d.addCallback(self.stall, 1.1)
867 self.writeto("parent/subdir/foo.txt", "FOOF!")
868 # and turn a file into a directory
869 os.unlink(os.path.join(source, "parent/blah.txt"))
870 os.mkdir(os.path.join(source, "parent/blah.txt"))
871 self.writeto("parent/blah.txt/surprise file", "surprise")
872 self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
873 # turn a directory into a file
874 os.rmdir(os.path.join(source, "empty"))
875 self.writeto("empty", "imagine nothing being here")
877 d.addCallback(_modify)
878 def _check5a((rc, out, err)):
879 # second backup should reuse bar.txt (if backupdb is available),
880 # and upload the rest. None of the directories can be reused.
881 self.failUnlessEqual(err, "")
882 self.failUnlessEqual(rc, 0)
884 fu, fr, dc, dr = self.count_output(out)
885 # new foo.txt, surprise file, subfile, empty
886 self.failUnlessEqual(fu, 4)
888 self.failUnlessEqual(fr, 1)
889 # home, parent, subdir, blah.txt, surprisedir
890 self.failUnlessEqual(dc, 5)
891 self.failUnlessEqual(dr, 0)
892 d.addCallback(_check5a)
893 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
894 def _check6((rc, out, err)):
895 self.failUnlessEqual(err, "")
896 self.failUnlessEqual(rc, 0)
897 self.new_archives = out.split()
901 self.failUnlessEqual(len(self.new_archives), expected_new)
902 self.failUnlessEqual(sorted(self.new_archives)[0],
903 self.old_archives[0])
904 d.addCallback(_check6)
905 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
906 def _check7((rc, out, err)):
907 self.failUnlessEqual(err, "")
908 self.failUnlessEqual(rc, 0)
909 self.failUnlessEqual(out, "FOOF!")
910 # the old snapshot should not be modified
911 return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
912 d.addCallback(_check7)
913 def _check8((rc, out, err)):
914 self.failUnlessEqual(err, "")
915 self.failUnlessEqual(rc, 0)
916 self.failUnlessEqual(out, "foo")
917 d.addCallback(_check8)
919 d.addCallback(self.stall, 1.1)
920 d.addCallback(lambda res: do_backup(use_backupdb=False))
921 def _check9((rc, out, err)):
922 # --no-backupdb means re-upload everything. We still get to
923 # re-use the directories, since nothing changed.
924 self.failUnlessEqual(err, "")
925 self.failUnlessEqual(rc, 0)
926 fu, fr, dc, dr = self.count_output(out)
927 self.failUnlessEqual(fu, 5)
928 self.failUnlessEqual(fr, 0)
929 self.failUnlessEqual(dc, 0)
930 self.failUnlessEqual(dr, 5)
931 d.addCallback(_check9)
    # on our old dapper buildslave, this test takes a long time (usually
    # 130s), so we have to bump up the default 120s timeout. The create-alias
    # and initial backup alone take 60s, probably because of the handful of
    # dirnodes being created (RSA key generation). The backup between check4
    # and check4a takes 6s, as does the backup before check4b.
    # NOTE: trial honors a per-test 'timeout' attribute on the test method,
    # so this overrides the default for test_backup only.
    test_backup.timeout = 300
942 def test_exclude_options(self):
943 root_listdir = ('lib.a', '_darcs', 'subdir', 'nice_doc.lyx')
944 subdir_listdir = ('another_doc.lyx', 'run_snake_run.py', 'CVS', '.svn', '_darcs')
946 def _check_filtering(filtered, all, included, excluded):
947 filtered = set(filtered)
949 included = set(included)
950 excluded = set(excluded)
951 self.failUnlessEqual(filtered, included)
952 self.failUnlessEqual(all.difference(filtered), excluded)
954 # test simple exclude
955 backup_options = cli.BackupOptions()
956 backup_options.parseOptions(['--exclude', '*lyx', '--node-url',
957 'http://ignore.it:2357', 'from', 'to'])
958 filtered = list(backup_options.filter_listdir(root_listdir))
959 _check_filtering(filtered, root_listdir, ('lib.a', '_darcs', 'subdir'),
962 backup_options = cli.BackupOptions()
963 backup_options.parseOptions(['--exclude', '*lyx', '--exclude', 'lib.?', '--node-url',
964 'http://ignore.it:2357', 'from', 'to'])
965 filtered = list(backup_options.filter_listdir(root_listdir))
966 _check_filtering(filtered, root_listdir, ('_darcs', 'subdir'),
967 ('nice_doc.lyx', 'lib.a'))
968 # vcs metadata exclusion
969 backup_options = cli.BackupOptions()
970 backup_options.parseOptions(['--exclude-vcs', '--node-url',
971 'http://ignore.it:2357', 'from', 'to'])
972 filtered = list(backup_options.filter_listdir(subdir_listdir))
973 _check_filtering(filtered, subdir_listdir, ('another_doc.lyx', 'run_snake_run.py',),
974 ('CVS', '.svn', '_darcs'))
975 # read exclude patterns from file
976 basedir = os.path.dirname(self.mktemp())
977 exclusion_string = "_darcs\n*py\n.svn"
978 excl_filepath = os.path.join(basedir, 'exclusion')
979 excl_file = file(excl_filepath, 'w')
980 excl_file.write(exclusion_string)
982 backup_options = cli.BackupOptions()
983 backup_options.parseOptions(['--exclude-from', excl_filepath, '--node-url',
984 'http://ignore.it:2357', 'from', 'to'])
985 filtered = list(backup_options.filter_listdir(subdir_listdir))
986 _check_filtering(filtered, subdir_listdir, ('another_doc.lyx', 'CVS'),
987 ('.svn', '_darcs', 'run_snake_run.py'))
988 # text BackupConfigurationError
989 self.failUnlessRaises(cli.BackupConfigurationError,
990 backup_options.parseOptions,
991 ['--exclude-from', excl_filepath + '.no', '--node-url',
992 'http://ignore.it:2357', 'from', 'to'])
995 class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
997 def test_check(self):
998 self.basedir = "cli/Check/check"
1000 c0 = self.g.clients[0]
1002 d = c0.create_mutable_file(DATA)
1004 self.uri = n.get_uri()
1005 d.addCallback(_stash_uri)
1007 d.addCallback(lambda ign: self.do_cli("check", self.uri))
1008 def _check1((rc, out, err)):
1009 self.failUnlessEqual(err, "")
1010 self.failUnlessEqual(rc, 0)
1011 lines = out.splitlines()
1012 self.failUnless("Summary: Healthy" in lines, out)
1013 self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
1014 d.addCallback(_check1)
1016 d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
1017 def _check2((rc, out, err)):
1018 self.failUnlessEqual(err, "")
1019 self.failUnlessEqual(rc, 0)
1020 data = simplejson.loads(out)
1021 self.failUnlessEqual(data["summary"], "Healthy")
1022 d.addCallback(_check2)
1024 def _clobber_shares(ignored):
1025 # delete one, corrupt a second
1026 shares = self.find_shares(self.uri)
1027 self.failUnlessEqual(len(shares), 10)
1028 os.unlink(shares[0][2])
1029 cso = debug.CorruptShareOptions()
1030 cso.stdout = StringIO()
1031 cso.parseOptions([shares[1][2]])
1032 storage_index = uri.from_string(self.uri).get_storage_index()
1033 self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
1034 (base32.b2a(shares[1][1]),
1035 base32.b2a(storage_index),
1037 debug.corrupt_share(cso)
1038 d.addCallback(_clobber_shares)
1040 d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
1041 def _check3((rc, out, err)):
1042 self.failUnlessEqual(err, "")
1043 self.failUnlessEqual(rc, 0)
1044 lines = out.splitlines()
1045 summary = [l for l in lines if l.startswith("Summary")][0]
1046 self.failUnless("Summary: Unhealthy: 8 shares (enc 3-of-10)"
1047 in summary, summary)
1048 self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
1049 self.failUnless(" corrupt shares:" in lines, out)
1050 self.failUnless(self._corrupt_share_line in lines, out)
1051 d.addCallback(_check3)
1053 d.addCallback(lambda ign:
1054 self.do_cli("check", "--verify", "--repair", self.uri))
1055 def _check4((rc, out, err)):
1056 self.failUnlessEqual(err, "")
1057 self.failUnlessEqual(rc, 0)
1058 lines = out.splitlines()
1059 self.failUnless("Summary: not healthy" in lines, out)
1060 self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
1061 self.failUnless(" corrupt shares:" in lines, out)
1062 self.failUnless(self._corrupt_share_line in lines, out)
1063 self.failUnless(" repair successful" in lines, out)
1064 d.addCallback(_check4)
1066 d.addCallback(lambda ign:
1067 self.do_cli("check", "--verify", "--repair", self.uri))
1068 def _check5((rc, out, err)):
1069 self.failUnlessEqual(err, "")
1070 self.failUnlessEqual(rc, 0)
1071 lines = out.splitlines()
1072 self.failUnless("Summary: healthy" in lines, out)
1073 self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
1074 self.failIf(" corrupt shares:" in lines, out)
1075 d.addCallback(_check5)
1079 def test_deep_check(self):
1080 self.basedir = "cli/Check/deep_check"
1082 c0 = self.g.clients[0]
1086 d = c0.create_empty_dirnode()
1087 def _stash_root_and_create_file(n):
1089 self.rooturi = n.get_uri()
1090 return n.add_file(u"good", upload.Data(DATA, convergence=""))
1091 d.addCallback(_stash_root_and_create_file)
1092 def _stash_uri(fn, which):
1093 self.uris[which] = fn.get_uri()
1094 d.addCallback(_stash_uri, "good")
1095 d.addCallback(lambda ign:
1096 self.rootnode.add_file(u"small",
1097 upload.Data("literal",
1099 d.addCallback(_stash_uri, "small")
1100 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"1"))
1101 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
1102 d.addCallback(_stash_uri, "mutable")
1104 d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
1105 def _check1((rc, out, err)):
1106 self.failUnlessEqual(err, "")
1107 self.failUnlessEqual(rc, 0)
1108 lines = out.splitlines()
1109 self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
1111 d.addCallback(_check1)
1113 d.addCallback(lambda ign: self.do_cli("deep-check", "--verbose",
1115 def _check2((rc, out, err)):
1116 self.failUnlessEqual(err, "")
1117 self.failUnlessEqual(rc, 0)
1118 lines = out.splitlines()
1119 self.failUnless("<root>: Healthy" in lines, out)
1120 self.failUnless("small: Healthy (LIT)" in lines, out)
1121 self.failUnless("good: Healthy" in lines, out)
1122 self.failUnless("mutable: Healthy" in lines, out)
1123 self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
1125 d.addCallback(_check2)
1127 def _clobber_shares(ignored):
1128 shares = self.find_shares(self.uris["good"])
1129 self.failUnlessEqual(len(shares), 10)
1130 os.unlink(shares[0][2])
1132 shares = self.find_shares(self.uris["mutable"])
1133 cso = debug.CorruptShareOptions()
1134 cso.stdout = StringIO()
1135 cso.parseOptions([shares[1][2]])
1136 storage_index = uri.from_string(self.uris["mutable"]).get_storage_index()
1137 self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
1138 (base32.b2a(shares[1][1]),
1139 base32.b2a(storage_index),
1141 debug.corrupt_share(cso)
1142 d.addCallback(_clobber_shares)
1144 d.addCallback(lambda ign:
1145 self.do_cli("deep-check", "--verbose", self.rooturi))
1146 def _check3((rc, out, err)):
1147 self.failUnlessEqual(err, "")
1148 self.failUnlessEqual(rc, 0)
1149 lines = out.splitlines()
1150 self.failUnless("<root>: Healthy" in lines, out)
1151 self.failUnless("small: Healthy (LIT)" in lines, out)
1152 self.failUnless("mutable: Healthy" in lines, out) # needs verifier
1153 self.failUnless("good: Not Healthy: 9 shares (enc 3-of-10)"
1155 self.failIf(self._corrupt_share_line in lines, out)
1156 self.failUnless("done: 4 objects checked, 3 healthy, 1 unhealthy"
1158 d.addCallback(_check3)
1160 d.addCallback(lambda ign:
1161 self.do_cli("deep-check", "--verbose", "--verify",
1163 def _check4((rc, out, err)):
1164 self.failUnlessEqual(err, "")
1165 self.failUnlessEqual(rc, 0)
1166 lines = out.splitlines()
1167 self.failUnless("<root>: Healthy" in lines, out)
1168 self.failUnless("small: Healthy (LIT)" in lines, out)
1169 mutable = [l for l in lines if l.startswith("mutable")][0]
1170 self.failUnless(mutable.startswith("mutable: Unhealthy: 9 shares (enc 3-of-10)"),
1172 self.failUnless(self._corrupt_share_line in lines, out)
1173 self.failUnless("good: Not Healthy: 9 shares (enc 3-of-10)"
1175 self.failUnless("done: 4 objects checked, 2 healthy, 2 unhealthy"
1177 d.addCallback(_check4)
1179 d.addCallback(lambda ign:
1180 self.do_cli("deep-check", "--raw",
1182 def _check5((rc, out, err)):
1183 self.failUnlessEqual(err, "")
1184 self.failUnlessEqual(rc, 0)
1185 lines = out.splitlines()
1186 units = [simplejson.loads(line) for line in lines]
1187 # root, small, good, mutable, stats
1188 self.failUnlessEqual(len(units), 4+1)
1189 d.addCallback(_check5)
1191 d.addCallback(lambda ign:
1192 self.do_cli("deep-check",
1193 "--verbose", "--verify", "--repair",
1195 def _check6((rc, out, err)):
1196 self.failUnlessEqual(err, "")
1197 self.failUnlessEqual(rc, 0)
1198 lines = out.splitlines()
1199 self.failUnless("<root>: healthy" in lines, out)
1200 self.failUnless("small: healthy" in lines, out)
1201 self.failUnless("mutable: not healthy" in lines, out)
1202 self.failUnless(self._corrupt_share_line in lines, out)
1203 self.failUnless("good: not healthy" in lines, out)
1204 self.failUnless("done: 4 objects checked" in lines, out)
1205 self.failUnless(" pre-repair: 2 healthy, 2 unhealthy" in lines, out)
1206 self.failUnless(" 2 repairs attempted, 2 successful, 0 failed"
1208 self.failUnless(" post-repair: 4 healthy, 0 unhealthy" in lines,out)
1209 d.addCallback(_check6)