4 from twisted.trial import unittest
5 from cStringIO import StringIO
10 from allmydata.util import fileutil, hashutil, base32
11 from allmydata import uri
12 from allmydata.immutable import upload
14 # Test that the scripts can be imported -- although the actual tests of their functionality are
15 # done by invoking them in a subprocess.
16 from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp
17 _hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp]
19 from allmydata.scripts import common
20 from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases, get_alias, \
23 from allmydata.scripts import cli, debug, runner, backupdb
24 from allmydata.test.common_util import StallMixin
25 from allmydata.test.no_network import GridTestMixin
26 from twisted.internet import threads # CLI tests use deferToThread
27 from twisted.python import usage
29 timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
31 class CLI(unittest.TestCase):
32 # this test case only looks at argument-processing and simple stuff.
33 def test_options(self):
34 fileutil.rm_dir("cli/test_options")
35 fileutil.make_dirs("cli/test_options")
36 fileutil.make_dirs("cli/test_options/private")
37 open("cli/test_options/node.url","w").write("http://localhost:8080/\n")
38 filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16,
39 fingerprint="\x00"*32)
40 private_uri = uri.DirectoryURI(filenode_uri).to_string()
41 open("cli/test_options/private/root_dir.cap", "w").write(private_uri + "\n")
43 o.parseOptions(["--node-directory", "cli/test_options"])
44 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
45 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
46 self.failUnlessEqual(o.where, "")
49 o.parseOptions(["--node-directory", "cli/test_options",
50 "--node-url", "http://example.org:8111/"])
51 self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
52 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
53 self.failUnlessEqual(o.where, "")
56 o.parseOptions(["--node-directory", "cli/test_options",
58 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
59 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
60 self.failUnlessEqual(o.where, "")
63 other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
64 fingerprint="\x11"*32)
65 other_uri = uri.DirectoryURI(other_filenode_uri).to_string()
66 o.parseOptions(["--node-directory", "cli/test_options",
67 "--dir-cap", other_uri])
68 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
69 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
70 self.failUnlessEqual(o.where, "")
73 o.parseOptions(["--node-directory", "cli/test_options",
74 "--dir-cap", other_uri, "subdir"])
75 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
76 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
77 self.failUnlessEqual(o.where, "subdir")
80 self.failUnlessRaises(usage.UsageError,
82 ["--node-directory", "cli/test_options",
83 "--node-url", "NOT-A-URL"])
86 o.parseOptions(["--node-directory", "cli/test_options",
87 "--node-url", "http://localhost:8080"])
88 self.failUnlessEqual(o["node-url"], "http://localhost:8080/")
90 def _dump_cap(self, *args):
91 config = debug.DumpCapOptions()
92 config.stdout,config.stderr = StringIO(), StringIO()
93 config.parseOptions(args)
94 debug.dump_cap(config)
95 self.failIf(config.stderr.getvalue())
96 output = config.stdout.getvalue()
99 def test_dump_cap_chk(self):
100 key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
101 uri_extension_hash = hashutil.uri_extension_hash("stuff")
105 u = uri.CHKFileURI(key=key,
106 uri_extension_hash=uri_extension_hash,
107 needed_shares=needed_shares,
108 total_shares=total_shares,
110 output = self._dump_cap(u.to_string())
111 self.failUnless("CHK File:" in output, output)
112 self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
113 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
114 self.failUnless("size: 1234" in output, output)
115 self.failUnless("k/N: 25/100" in output, output)
116 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
118 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
120 self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output)
122 output = self._dump_cap(u.get_verify_cap().to_string())
123 self.failIf("key: " in output, output)
124 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
125 self.failUnless("size: 1234" in output, output)
126 self.failUnless("k/N: 25/100" in output, output)
127 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
129 prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string())
130 output = self._dump_cap(prefixed_u)
131 self.failUnless("CHK File:" in output, output)
132 self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
133 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
134 self.failUnless("size: 1234" in output, output)
135 self.failUnless("k/N: 25/100" in output, output)
136 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
138 def test_dump_cap_lit(self):
139 u = uri.LiteralFileURI("this is some data")
140 output = self._dump_cap(u.to_string())
141 self.failUnless("Literal File URI:" in output, output)
142 self.failUnless("data: this is some data" in output, output)
144 def test_dump_cap_ssk(self):
145 writekey = "\x01" * 16
146 fingerprint = "\xfe" * 32
147 u = uri.WriteableSSKFileURI(writekey, fingerprint)
149 output = self._dump_cap(u.to_string())
150 self.failUnless("SSK Writeable URI:" in output, output)
151 self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output, output)
152 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
153 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
154 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
156 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
158 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
160 fileutil.make_dirs("cli/test_dump_cap/private")
161 f = open("cli/test_dump_cap/private/secret", "w")
162 f.write("5s33nk3qpvnj2fw3z4mnm2y6fa\n")
164 output = self._dump_cap("--client-dir", "cli/test_dump_cap",
166 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
168 output = self._dump_cap("--client-dir", "cli/test_dump_cap_BOGUS",
170 self.failIf("file renewal secret:" in output, output)
172 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
174 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
175 self.failIf("file renewal secret:" in output, output)
177 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
178 "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
180 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
181 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
182 self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
185 output = self._dump_cap(u.to_string())
186 self.failUnless("SSK Read-only URI:" in output, output)
187 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
188 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
189 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
191 u = u.get_verify_cap()
192 output = self._dump_cap(u.to_string())
193 self.failUnless("SSK Verifier URI:" in output, output)
194 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
195 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
197 def test_dump_cap_directory(self):
198 writekey = "\x01" * 16
199 fingerprint = "\xfe" * 32
200 u1 = uri.WriteableSSKFileURI(writekey, fingerprint)
201 u = uri.DirectoryURI(u1)
203 output = self._dump_cap(u.to_string())
204 self.failUnless("Directory Writeable URI:" in output, output)
205 self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output,
207 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
208 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output,
210 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
212 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
214 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
216 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
218 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
219 self.failIf("file renewal secret:" in output, output)
221 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
222 "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
224 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
225 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
226 self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
229 output = self._dump_cap(u.to_string())
230 self.failUnless("Directory Read-only URI:" in output, output)
231 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
232 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
233 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
235 u = u.get_verify_cap()
236 output = self._dump_cap(u.to_string())
237 self.failUnless("Directory Verifier URI:" in output, output)
238 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
239 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
241 def _catalog_shares(self, *basedirs):
242 o = debug.CatalogSharesOptions()
243 o.stdout,o.stderr = StringIO(), StringIO()
244 args = list(basedirs)
246 debug.catalog_shares(o)
247 out = o.stdout.getvalue()
248 err = o.stderr.getvalue()
251 def test_catalog_shares_error(self):
252 nodedir1 = "cli/test_catalog_shares/node1"
253 sharedir = os.path.join(nodedir1, "storage", "shares", "mq", "mqfblse6m5a6dh45isu2cg7oji")
254 fileutil.make_dirs(sharedir)
255 f = open(os.path.join(sharedir, "8"), "wb")
256 open("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "wb").close()
257 # write a bogus share that looks a little bit like CHK
258 f.write("\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert
261 nodedir2 = "cli/test_catalog_shares/node2"
262 fileutil.make_dirs(nodedir2)
263 open("cli/test_catalog_shares/node1/storage/shares/not-a-dir", "wb").close()
265 # now make sure that the 'catalog-shares' commands survives the error
266 out, err = self._catalog_shares(nodedir1, nodedir2)
267 self.failUnlessEqual(out, "", out)
268 self.failUnless("Error processing " in err,
269 "didn't see 'error processing' in '%s'" % err)
270 #self.failUnless(nodedir1 in err,
271 # "didn't see '%s' in '%s'" % (nodedir1, err))
272 # windows mangles the path, and os.path.join isn't enough to make
273 # up for it, so just look for individual strings
274 self.failUnless("node1" in err,
275 "didn't see 'node1' in '%s'" % err)
276 self.failUnless("mqfblse6m5a6dh45isu2cg7oji" in err,
277 "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)
279 def test_alias(self):
280 aliases = {"tahoe": "TA",
284 return get_alias(aliases, path, "tahoe")
285 uses_lettercolon = common.platform_uses_lettercolon_drivename()
286 self.failUnlessEqual(ga1("bare"), ("TA", "bare"))
287 self.failUnlessEqual(ga1("baredir/file"), ("TA", "baredir/file"))
288 self.failUnlessEqual(ga1("baredir/file:7"), ("TA", "baredir/file:7"))
289 self.failUnlessEqual(ga1("tahoe:"), ("TA", ""))
290 self.failUnlessEqual(ga1("tahoe:file"), ("TA", "file"))
291 self.failUnlessEqual(ga1("tahoe:dir/file"), ("TA", "dir/file"))
292 self.failUnlessEqual(ga1("work:"), ("WA", ""))
293 self.failUnlessEqual(ga1("work:file"), ("WA", "file"))
294 self.failUnlessEqual(ga1("work:dir/file"), ("WA", "dir/file"))
295 # default != None means we really expect a tahoe path, regardless of
296 # whether we're on windows or not. This is what 'tahoe get' uses.
297 self.failUnlessEqual(ga1("c:"), ("CA", ""))
298 self.failUnlessEqual(ga1("c:file"), ("CA", "file"))
299 self.failUnlessEqual(ga1("c:dir/file"), ("CA", "dir/file"))
300 self.failUnlessEqual(ga1("URI:stuff"), ("URI:stuff", ""))
301 self.failUnlessEqual(ga1("URI:stuff:./file"), ("URI:stuff", "file"))
302 self.failUnlessEqual(ga1("URI:stuff:./dir/file"),
303 ("URI:stuff", "dir/file"))
304 self.failUnlessRaises(common.UnknownAliasError, ga1, "missing:")
305 self.failUnlessRaises(common.UnknownAliasError, ga1, "missing:dir")
306 self.failUnlessRaises(common.UnknownAliasError, ga1, "missing:dir/file")
309 return get_alias(aliases, path, None)
310 self.failUnlessEqual(ga2("bare"), (DefaultAliasMarker, "bare"))
311 self.failUnlessEqual(ga2("baredir/file"),
312 (DefaultAliasMarker, "baredir/file"))
313 self.failUnlessEqual(ga2("baredir/file:7"),
314 (DefaultAliasMarker, "baredir/file:7"))
315 self.failUnlessEqual(ga2("baredir/sub:1/file:7"),
316 (DefaultAliasMarker, "baredir/sub:1/file:7"))
317 self.failUnlessEqual(ga2("tahoe:"), ("TA", ""))
318 self.failUnlessEqual(ga2("tahoe:file"), ("TA", "file"))
319 self.failUnlessEqual(ga2("tahoe:dir/file"), ("TA", "dir/file"))
320 # on windows, we really want c:foo to indicate a local file.
321 # default==None is what 'tahoe cp' uses.
323 self.failUnlessEqual(ga2("c:"), (DefaultAliasMarker, "c:"))
324 self.failUnlessEqual(ga2("c:file"), (DefaultAliasMarker, "c:file"))
325 self.failUnlessEqual(ga2("c:dir/file"),
326 (DefaultAliasMarker, "c:dir/file"))
328 self.failUnlessEqual(ga2("c:"), ("CA", ""))
329 self.failUnlessEqual(ga2("c:file"), ("CA", "file"))
330 self.failUnlessEqual(ga2("c:dir/file"), ("CA", "dir/file"))
331 self.failUnlessEqual(ga2("work:"), ("WA", ""))
332 self.failUnlessEqual(ga2("work:file"), ("WA", "file"))
333 self.failUnlessEqual(ga2("work:dir/file"), ("WA", "dir/file"))
334 self.failUnlessEqual(ga2("URI:stuff"), ("URI:stuff", ""))
335 self.failUnlessEqual(ga2("URI:stuff:./file"), ("URI:stuff", "file"))
336 self.failUnlessEqual(ga2("URI:stuff:./dir/file"), ("URI:stuff", "dir/file"))
337 self.failUnlessRaises(common.UnknownAliasError, ga2, "missing:")
338 self.failUnlessRaises(common.UnknownAliasError, ga2, "missing:dir")
339 self.failUnlessRaises(common.UnknownAliasError, ga2, "missing:dir/file")
342 old = common.pretend_platform_uses_lettercolon
344 common.pretend_platform_uses_lettercolon = True
345 retval = get_alias(aliases, path, None)
347 common.pretend_platform_uses_lettercolon = old
349 self.failUnlessEqual(ga3("bare"), (DefaultAliasMarker, "bare"))
350 self.failUnlessEqual(ga3("baredir/file"),
351 (DefaultAliasMarker, "baredir/file"))
352 self.failUnlessEqual(ga3("baredir/file:7"),
353 (DefaultAliasMarker, "baredir/file:7"))
354 self.failUnlessEqual(ga3("baredir/sub:1/file:7"),
355 (DefaultAliasMarker, "baredir/sub:1/file:7"))
356 self.failUnlessEqual(ga3("tahoe:"), ("TA", ""))
357 self.failUnlessEqual(ga3("tahoe:file"), ("TA", "file"))
358 self.failUnlessEqual(ga3("tahoe:dir/file"), ("TA", "dir/file"))
359 self.failUnlessEqual(ga3("c:"), (DefaultAliasMarker, "c:"))
360 self.failUnlessEqual(ga3("c:file"), (DefaultAliasMarker, "c:file"))
361 self.failUnlessEqual(ga3("c:dir/file"),
362 (DefaultAliasMarker, "c:dir/file"))
363 self.failUnlessEqual(ga3("work:"), ("WA", ""))
364 self.failUnlessEqual(ga3("work:file"), ("WA", "file"))
365 self.failUnlessEqual(ga3("work:dir/file"), ("WA", "dir/file"))
366 self.failUnlessEqual(ga3("URI:stuff"), ("URI:stuff", ""))
367 self.failUnlessEqual(ga3("URI:stuff:./file"), ("URI:stuff", "file"))
368 self.failUnlessEqual(ga3("URI:stuff:./dir/file"), ("URI:stuff", "dir/file"))
369 self.failUnlessRaises(common.UnknownAliasError, ga3, "missing:")
370 self.failUnlessRaises(common.UnknownAliasError, ga3, "missing:dir")
371 self.failUnlessRaises(common.UnknownAliasError, ga3, "missing:dir/file")
374 class Help(unittest.TestCase):
377 help = str(cli.GetOptions())
378 self.failUnless("get REMOTE_FILE LOCAL_FILE" in help, help)
379 self.failUnless("% tahoe get FOO |less" in help, help)
382 help = str(cli.PutOptions())
383 self.failUnless("put LOCAL_FILE REMOTE_FILE" in help, help)
384 self.failUnless("% cat FILE | tahoe put" in help, help)
387 help = str(cli.RmOptions())
388 self.failUnless("rm REMOTE_FILE" in help, help)
391 help = str(cli.MvOptions())
392 self.failUnless("mv FROM TO" in help, help)
393 self.failUnless("Use 'tahoe mv' to move files" in help)
396 help = str(cli.LnOptions())
397 self.failUnless("ln FROM TO" in help, help)
399 def test_backup(self):
400 help = str(cli.BackupOptions())
401 self.failUnless("backup FROM ALIAS:TO" in help, help)
403 def test_webopen(self):
404 help = str(cli.WebopenOptions())
405 self.failUnless("webopen [ALIAS:PATH]" in help, help)
407 def test_manifest(self):
408 help = str(cli.ManifestOptions())
409 self.failUnless("manifest [ALIAS:PATH]" in help, help)
411 def test_stats(self):
412 help = str(cli.StatsOptions())
413 self.failUnless("stats [ALIAS:PATH]" in help, help)
415 def test_check(self):
416 help = str(cli.CheckOptions())
417 self.failUnless("check [ALIAS:PATH]" in help, help)
419 def test_deep_check(self):
420 help = str(cli.DeepCheckOptions())
421 self.failUnless("deep-check [ALIAS:PATH]" in help, help)
423 def test_create_alias(self):
424 help = str(cli.CreateAliasOptions())
425 self.failUnless("create-alias ALIAS" in help, help)
427 def test_add_aliases(self):
428 help = str(cli.AddAliasOptions())
429 self.failUnless("add-alias ALIAS DIRCAP" in help, help)
432 def do_cli(self, verb, *args, **kwargs):
434 "--node-directory", self.get_clientdir(),
436 argv = [verb] + nodeargs + list(args)
437 stdin = kwargs.get("stdin", "")
438 stdout, stderr = StringIO(), StringIO()
439 d = threads.deferToThread(runner.runner, argv, run_by_human=False,
440 stdin=StringIO(stdin),
441 stdout=stdout, stderr=stderr)
443 return rc, stdout.getvalue(), stderr.getvalue()
447 class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
449 def _test_webopen(self, args, expected_url):
450 woo = cli.WebopenOptions()
451 all_args = ["--node-directory", self.get_clientdir()] + list(args)
452 woo.parseOptions(all_args)
454 rc = cli.webopen(woo, urls.append)
455 self.failUnlessEqual(rc, 0)
456 self.failUnlessEqual(len(urls), 1)
457 self.failUnlessEqual(urls[0], expected_url)
459 def test_create(self):
460 self.basedir = "cli/CreateAlias/create"
462 aliasfile = os.path.join(self.get_clientdir(), "private", "aliases")
464 d = self.do_cli("create-alias", "tahoe")
465 def _done((rc,stdout,stderr)):
466 self.failUnless("Alias 'tahoe' created" in stdout)
468 aliases = get_aliases(self.get_clientdir())
469 self.failUnless("tahoe" in aliases)
470 self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
472 d.addCallback(lambda res: self.do_cli("create-alias", "two"))
474 def _stash_urls(res):
475 aliases = get_aliases(self.get_clientdir())
476 node_url_file = os.path.join(self.get_clientdir(), "node.url")
477 nodeurl = open(node_url_file, "r").read().strip()
478 self.welcome_url = nodeurl
479 uribase = nodeurl + "uri/"
480 self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
481 self.tahoe_subdir_url = self.tahoe_url + "/subdir"
482 self.two_url = uribase + urllib.quote(aliases["two"])
483 self.two_uri = aliases["two"]
484 d.addCallback(_stash_urls)
486 d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
487 def _check_create_duplicate((rc,stdout,stderr)):
488 self.failIfEqual(rc, 0)
489 self.failUnless("Alias 'two' already exists!" in stderr)
490 aliases = get_aliases(self.get_clientdir())
491 self.failUnlessEqual(aliases["two"], self.two_uri)
492 d.addCallback(_check_create_duplicate)
494 d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
495 def _check_add((rc,stdout,stderr)):
496 self.failUnlessEqual(rc, 0)
497 self.failUnless("Alias 'added' added" in stdout)
498 d.addCallback(_check_add)
500 # check add-alias with a duplicate
501 d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
502 def _check_add_duplicate((rc,stdout,stderr)):
503 self.failIfEqual(rc, 0)
504 self.failUnless("Alias 'two' already exists!" in stderr)
505 aliases = get_aliases(self.get_clientdir())
506 self.failUnlessEqual(aliases["two"], self.two_uri)
507 d.addCallback(_check_add_duplicate)
509 def _test_urls(junk):
510 self._test_webopen([], self.welcome_url)
511 self._test_webopen(["/"], self.tahoe_url)
512 self._test_webopen(["tahoe:"], self.tahoe_url)
513 self._test_webopen(["tahoe:/"], self.tahoe_url)
514 self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
515 self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
516 self._test_webopen(["tahoe:subdir/file"], self.tahoe_subdir_url + '/file')
517 # if "file" is indeed a file, then the url produced by webopen in
518 # this case is disallowed by the webui. but by design, webopen
519 # passes through the mistake from the user to the resultant
521 self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
522 self._test_webopen(["two:"], self.two_url)
523 d.addCallback(_test_urls)
525 def _remove_trailing_newline_and_create_alias(ign):
526 f = open(aliasfile, "r")
529 # ticket #741 is about a manually-edited alias file (which
530 # doesn't end in a newline) being corrupted by a subsequent
531 # "tahoe create-alias"
532 f = open(aliasfile, "w")
533 f.write(old.rstrip())
535 return self.do_cli("create-alias", "un-corrupted1")
536 d.addCallback(_remove_trailing_newline_and_create_alias)
537 def _check_not_corrupted1((rc,stdout,stderr)):
538 self.failUnless("Alias 'un-corrupted1' created" in stdout, stdout)
540 # the old behavior was to simply append the new record, causing a
541 # line that looked like "NAME1: CAP1NAME2: CAP2". This won't look
542 # like a valid dircap, so get_aliases() will raise an exception.
543 aliases = get_aliases(self.get_clientdir())
544 self.failUnless("added" in aliases)
545 self.failUnless(aliases["added"].startswith("URI:DIR2:"))
546 # to be safe, let's confirm that we don't see "NAME2:" in CAP1.
547 # No chance of a false-negative, because the hyphen in
548 # "un-corrupted1" is not a valid base32 character.
549 self.failIfIn("un-corrupted1:", aliases["added"])
550 self.failUnless("un-corrupted1" in aliases)
551 self.failUnless(aliases["un-corrupted1"].startswith("URI:DIR2:"))
552 d.addCallback(_check_not_corrupted1)
554 def _remove_trailing_newline_and_add_alias(ign):
555 # same thing, but for "tahoe add-alias"
556 f = open(aliasfile, "r")
559 f = open(aliasfile, "w")
560 f.write(old.rstrip())
562 return self.do_cli("add-alias", "un-corrupted2", self.two_uri)
563 d.addCallback(_remove_trailing_newline_and_add_alias)
564 def _check_not_corrupted((rc,stdout,stderr)):
565 self.failUnless("Alias 'un-corrupted2' added" in stdout, stdout)
567 aliases = get_aliases(self.get_clientdir())
568 self.failUnless("un-corrupted1" in aliases)
569 self.failUnless(aliases["un-corrupted1"].startswith("URI:DIR2:"))
570 self.failIfIn("un-corrupted2:", aliases["un-corrupted1"])
571 self.failUnless("un-corrupted2" in aliases)
572 self.failUnless(aliases["un-corrupted2"].startswith("URI:DIR2:"))
573 d.addCallback(_check_not_corrupted)
577 class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
579 def test_unlinked_immutable_stdin(self):
580 # tahoe get `echo DATA | tahoe put`
581 # tahoe get `echo DATA | tahoe put -`
582 self.basedir = "cli/Put/unlinked_immutable_stdin"
585 d = self.do_cli("put", stdin=DATA)
587 (rc, stdout, stderr) = res
588 self.failUnless("waiting for file data on stdin.." in stderr)
589 self.failUnless("200 OK" in stderr, stderr)
590 self.readcap = stdout
591 self.failUnless(self.readcap.startswith("URI:CHK:"))
592 d.addCallback(_uploaded)
593 d.addCallback(lambda res: self.do_cli("get", self.readcap))
594 def _downloaded(res):
595 (rc, stdout, stderr) = res
596 self.failUnlessEqual(stderr, "")
597 self.failUnlessEqual(stdout, DATA)
598 d.addCallback(_downloaded)
599 d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
600 d.addCallback(lambda (rc,stdout,stderr):
601 self.failUnlessEqual(stdout, self.readcap))
604 def test_unlinked_immutable_from_file(self):
606 # tahoe put ./file.txt
607 # tahoe put /tmp/file.txt
608 # tahoe put ~/file.txt
609 self.basedir = "cli/Put/unlinked_immutable_from_file"
612 rel_fn = os.path.join(self.basedir, "DATAFILE")
613 abs_fn = os.path.abspath(rel_fn)
614 # we make the file small enough to fit in a LIT file, for speed
615 f = open(rel_fn, "w")
616 f.write("short file")
618 d = self.do_cli("put", rel_fn)
619 def _uploaded((rc,stdout,stderr)):
621 self.failUnless(readcap.startswith("URI:LIT:"))
622 self.readcap = readcap
623 d.addCallback(_uploaded)
624 d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
625 d.addCallback(lambda (rc,stdout,stderr):
626 self.failUnlessEqual(stdout, self.readcap))
627 d.addCallback(lambda res: self.do_cli("put", abs_fn))
628 d.addCallback(lambda (rc,stdout,stderr):
629 self.failUnlessEqual(stdout, self.readcap))
630 # we just have to assume that ~ is handled properly
633 def test_immutable_from_file(self):
634 # tahoe put file.txt uploaded.txt
635 # tahoe - uploaded.txt
636 # tahoe put file.txt subdir/uploaded.txt
637 # tahoe put file.txt tahoe:uploaded.txt
638 # tahoe put file.txt tahoe:subdir/uploaded.txt
639 # tahoe put file.txt DIRCAP:./uploaded.txt
640 # tahoe put file.txt DIRCAP:./subdir/uploaded.txt
641 self.basedir = "cli/Put/immutable_from_file"
644 rel_fn = os.path.join(self.basedir, "DATAFILE")
645 # we make the file small enough to fit in a LIT file, for speed
647 DATA2 = "short file two"
648 f = open(rel_fn, "w")
652 d = self.do_cli("create-alias", "tahoe")
654 d.addCallback(lambda res:
655 self.do_cli("put", rel_fn, "uploaded.txt"))
656 def _uploaded((rc,stdout,stderr)):
657 readcap = stdout.strip()
658 self.failUnless(readcap.startswith("URI:LIT:"))
659 self.failUnless("201 Created" in stderr, stderr)
660 self.readcap = readcap
661 d.addCallback(_uploaded)
662 d.addCallback(lambda res:
663 self.do_cli("get", "tahoe:uploaded.txt"))
664 d.addCallback(lambda (rc,stdout,stderr):
665 self.failUnlessEqual(stdout, DATA))
667 d.addCallback(lambda res:
668 self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
669 def _replaced((rc,stdout,stderr)):
670 readcap = stdout.strip()
671 self.failUnless(readcap.startswith("URI:LIT:"))
672 self.failUnless("200 OK" in stderr, stderr)
673 d.addCallback(_replaced)
675 d.addCallback(lambda res:
676 self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
677 d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
678 d.addCallback(lambda (rc,stdout,stderr):
679 self.failUnlessEqual(stdout, DATA))
681 d.addCallback(lambda res:
682 self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
683 d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
684 d.addCallback(lambda (rc,stdout,stderr):
685 self.failUnlessEqual(stdout, DATA))
687 d.addCallback(lambda res:
688 self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
689 d.addCallback(lambda res:
690 self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
691 d.addCallback(lambda (rc,stdout,stderr):
692 self.failUnlessEqual(stdout, DATA))
694 def _get_dircap(res):
695 self.dircap = get_aliases(self.get_clientdir())["tahoe"]
696 d.addCallback(_get_dircap)
698 d.addCallback(lambda res:
699 self.do_cli("put", rel_fn,
700 self.dircap+":./uploaded5.txt"))
701 d.addCallback(lambda res:
702 self.do_cli("get", "tahoe:uploaded5.txt"))
703 d.addCallback(lambda (rc,stdout,stderr):
704 self.failUnlessEqual(stdout, DATA))
706 d.addCallback(lambda res:
707 self.do_cli("put", rel_fn,
708 self.dircap+":./subdir/uploaded6.txt"))
709 d.addCallback(lambda res:
710 self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
711 d.addCallback(lambda (rc,stdout,stderr):
712 self.failUnlessEqual(stdout, DATA))
716 def test_mutable_unlinked(self):
717 # FILECAP = `echo DATA | tahoe put --mutable`
718 # tahoe get FILECAP, compare against DATA
719 # echo DATA2 | tahoe put - FILECAP
720 # tahoe get FILECAP, compare against DATA2
721 # tahoe put file.txt FILECAP
722 self.basedir = "cli/Put/mutable_unlinked"
727 rel_fn = os.path.join(self.basedir, "DATAFILE")
728 DATA3 = "three" * 100
729 f = open(rel_fn, "w")
733 d = self.do_cli("put", "--mutable", stdin=DATA)
735 (rc, stdout, stderr) = res
736 self.failUnless("waiting for file data on stdin.." in stderr)
737 self.failUnless("200 OK" in stderr)
738 self.filecap = stdout
739 self.failUnless(self.filecap.startswith("URI:SSK:"))
740 d.addCallback(_created)
741 d.addCallback(lambda res: self.do_cli("get", self.filecap))
742 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA))
744 d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
746 (rc, stdout, stderr) = res
747 self.failUnless("waiting for file data on stdin.." in stderr)
748 self.failUnless("200 OK" in stderr)
749 self.failUnlessEqual(self.filecap, stdout)
750 d.addCallback(_replaced)
751 d.addCallback(lambda res: self.do_cli("get", self.filecap))
752 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
754 d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
756 (rc, stdout, stderr) = res
757 self.failUnless("200 OK" in stderr)
758 self.failUnlessEqual(self.filecap, stdout)
759 d.addCallback(_replaced2)
760 d.addCallback(lambda res: self.do_cli("get", self.filecap))
761 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA3))
765 def test_mutable(self):
766 # echo DATA1 | tahoe put --mutable - uploaded.txt
767 # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
768 # tahoe get uploaded.txt, compare against DATA2
770 self.basedir = "cli/Put/mutable"
774 fn1 = os.path.join(self.basedir, "DATA1")
779 fn2 = os.path.join(self.basedir, "DATA2")
784 d = self.do_cli("create-alias", "tahoe")
785 d.addCallback(lambda res:
786 self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
787 d.addCallback(lambda res:
788 self.do_cli("put", fn2, "tahoe:uploaded.txt"))
789 d.addCallback(lambda res:
790 self.do_cli("get", "tahoe:uploaded.txt"))
791 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
# Tests for the `tahoe ls` CLI command against a grid with healthy,
# under-replicated (1 share), and unrecoverable (0 shares) directories.
# NOTE(review): excerpted listing — embedded original line numbers with gaps
# (795, 797, 799, 802, ...) show missing source lines, including the `def`
# line of the test method itself; code kept byte-identical.
794 class List(GridTestMixin, CLITestMixin, unittest.TestCase):
796 self.basedir = "cli/List/list"
798 c0 = self.g.clients[0]
# Set up the fixture: a root dirnode holding one healthy file ("good"),
# one subdirectory with 9 of 10 shares deleted ("1share"), and one with
# all 10 shares deleted ("0share").
800 d = c0.create_dirnode()
801 def _stash_root_and_create_file(n):
803 self.rooturi = n.get_uri()
804 return n.add_file(u"good", upload.Data(small, convergence=""))
805 d.addCallback(_stash_root_and_create_file)
806 def _stash_goodcap(n):
807 self.goodcap = n.get_uri()
808 d.addCallback(_stash_goodcap)
809 d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"1share"))
810 d.addCallback(lambda n:
811 self.delete_shares_numbered(n.get_uri(), range(1,10)))
812 d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"0share"))
813 d.addCallback(lambda n:
814 self.delete_shares_numbered(n.get_uri(), range(0,10)))
815 d.addCallback(lambda ign:
816 self.do_cli("add-alias", "tahoe", self.rooturi))
# Plain `ls` of the root should succeed and list all three children.
817 d.addCallback(lambda ign: self.do_cli("ls"))
818 def _check1((rc,out,err)):
819 self.failUnlessEqual(err, "")
820 self.failUnlessEqual(rc, 0)
821 self.failUnlessEqual(out.splitlines(), ["0share", "1share", "good"])
822 d.addCallback(_check1)
# `ls` of a nonexistent child: nonzero rc, friendly stderr, empty stdout.
823 d.addCallback(lambda ign: self.do_cli("ls", "missing"))
824 def _check2((rc,out,err)):
825 self.failIfEqual(rc, 0)
826 self.failUnlessEqual(err.strip(), "No such file or directory")
827 self.failUnlessEqual(out, "")
828 d.addCallback(_check2)
# `ls` of the under-replicated and unrecoverable dirs: both should fail
# with a 410 Gone / UnrecoverableFileError on stderr (same check reused).
829 d.addCallback(lambda ign: self.do_cli("ls", "1share"))
830 def _check3((rc,out,err)):
831 self.failIfEqual(rc, 0)
832 self.failUnlessIn("Error during GET: 410 Gone ", err)
833 self.failUnlessIn("UnrecoverableFileError:", err)
834 self.failUnlessIn("could not be retrieved, because there were "
835 "insufficient good shares.", err)
836 self.failUnlessEqual(out, "")
837 d.addCallback(_check3)
838 d.addCallback(lambda ign: self.do_cli("ls", "0share"))
839 d.addCallback(_check3)
840 def _check4((rc, out, err)):
841 # listing a file (as dir/filename) should have the edge metadata,
842 # including the filename
843 self.failUnlessEqual(rc, 0)
844 self.failUnlessIn("good", out)
845 self.failIfIn("-r-- %d -" % len(small), out,
846 "trailing hyphen means unknown date")
847 d.addCallback(lambda ign: self.do_cli("ls", "-l", "good"))
848 d.addCallback(_check4)
849 def _check5((rc, out, err)):
850 # listing a raw filecap should not explode, but it will have no
851 # metadata, just the size
852 self.failUnlessEqual(rc, 0)
853 self.failUnlessEqual("-r-- %d -" % len(small), out.strip())
854 d.addCallback(lambda ign: self.do_cli("ls", "-l", self.goodcap))
855 d.addCallback(_check5)
# Tests for the `tahoe mv` CLI command: rename, overwrite-a-file,
# refuse-to-overwrite-a-directory, move-into-directory (trailing slash),
# and moving out of a nested directory.
# NOTE(review): excerpted listing — embedded original line numbers with gaps
# (861, 874-875, 881-882, 889, 893-894, 900, 903, ...) show missing source
# lines (e.g. the failUnlessIn( opening of the check after line 899); code
# kept byte-identical.
858 class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
859 def test_mv_behavior(self):
860 self.basedir = "cli/Mv/mv_behavior"
862 fn1 = os.path.join(self.basedir, "file1")
863 DATA1 = "Nuclear launch codes"
864 open(fn1, "wb").write(DATA1)
865 fn2 = os.path.join(self.basedir, "file2")
866 DATA2 = "UML diagrams"
867 open(fn2, "wb").write(DATA2)
868 # copy both files to the grid
869 d = self.do_cli("create-alias", "tahoe")
870 d.addCallback(lambda res:
871 self.do_cli("cp", fn1, "tahoe:"))
872 d.addCallback(lambda res:
873 self.do_cli("cp", fn2, "tahoe:"))
876 # (we should be able to rename files)
877 d.addCallback(lambda res:
878 self.do_cli("mv", "tahoe:file1", "tahoe:file3"))
879 d.addCallback(lambda (rc, out, err):
880 self.failUnlessIn("OK", out, "mv didn't rename a file"))
883 # (This should succeed without issue)
884 d.addCallback(lambda res:
885 self.do_cli("mv", "tahoe:file3", "tahoe:file2"))
886 # Out should contain "OK" to show that the transfer worked.
887 d.addCallback(lambda (rc,out,err):
888 self.failUnlessIn("OK", out, "mv didn't output OK after mving"))
890 # Next, make a remote directory.
891 d.addCallback(lambda res:
892 self.do_cli("mkdir", "tahoe:directory"))
895 # (should fail with a descriptive error message; the CLI mv
896 # client should support this)
897 d.addCallback(lambda res:
898 self.do_cli("mv", "tahoe:file2", "tahoe:directory"))
899 d.addCallback(lambda (rc, out, err):
901 "Error: You can't overwrite a directory with a file", err,
902 "mv shouldn't overwrite directories" ))
904 # mv file2 directory/
905 # (should succeed by making file2 a child node of directory)
906 d.addCallback(lambda res:
907 self.do_cli("mv", "tahoe:file2", "tahoe:directory/"))
908 # We should see an "OK"...
909 d.addCallback(lambda (rc, out, err):
910 self.failUnlessIn("OK", out,
911 "mv didn't mv a file into a directory"))
912 # ... and be able to GET the file
913 d.addCallback(lambda res:
914 self.do_cli("get", "tahoe:directory/file2", self.basedir + "new"))
915 d.addCallback(lambda (rc, out, err):
916 self.failUnless(os.path.exists(self.basedir + "new"),
917 "mv didn't write the destination file"))
918 # ... and not find the file where it was before.
919 d.addCallback(lambda res:
920 self.do_cli("get", "tahoe:file2", "file2"))
921 d.addCallback(lambda (rc, out, err):
922 self.failUnlessIn("404", err,
923 "mv left the source file intact"))
926 # directory/directory2/some_file
928 d.addCallback(lambda res:
929 self.do_cli("mkdir", "tahoe:directory/directory2"))
930 d.addCallback(lambda res:
931 self.do_cli("cp", fn2, "tahoe:directory/directory2/some_file"))
932 d.addCallback(lambda res:
933 self.do_cli("mkdir", "tahoe:directory3"))
935 # Let's now try to mv directory/directory2/some_file to
936 # directory3/some_file
937 d.addCallback(lambda res:
938 self.do_cli("mv", "tahoe:directory/directory2/some_file",
939 "tahoe:directory3/"))
940 # We should have just some_file in tahoe:directory3
941 d.addCallback(lambda res:
942 self.do_cli("get", "tahoe:directory3/some_file", "some_file"))
943 d.addCallback(lambda (rc, out, err):
944 self.failUnless("404" not in err,
945 "mv didn't handle nested directories correctly"))
946 d.addCallback(lambda res:
947 self.do_cli("get", "tahoe:directory3/directory", "directory"))
948 d.addCallback(lambda (rc, out, err):
949 self.failUnlessIn("404", err,
950 "mv moved the wrong thing"))
# Tests for the `tahoe cp` CLI command: argument validation, unicode
# filenames, dangling symlinks under --recursive, and copying via raw
# filecaps / dircaps.
# NOTE(review): excerpted listing — embedded original line numbers with gaps
# (954, 956, 959, 962-963, ..., 1002-1004 where the `cp --recursive` call's
# arguments and assertions are cut off) show missing source lines; code kept
# byte-identical.
953 class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
# cp with a single argument must be rejected by the options parser.
955 def test_not_enough_args(self):
957 self.failUnlessRaises(usage.UsageError,
958 o.parseOptions, ["onearg"])
# A unicode-named upload must not break subsequent uploads to the same dir.
960 def test_unicode_filename(self):
961 self.basedir = "cli/Cp/unicode_filename"
964 fn1 = os.path.join(self.basedir, "Ärtonwall")
965 DATA1 = "unicode file content"
966 open(fn1, "wb").write(DATA1)
968 fn2 = os.path.join(self.basedir, "Metallica")
969 DATA2 = "non-unicode file content"
970 open(fn2, "wb").write(DATA2)
973 # Assure that uploading a file whose name contains unicode character doesn't
974 # prevent further uploads in the same directory
975 d = self.do_cli("create-alias", "tahoe")
976 d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
977 d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
979 d.addCallback(lambda res: self.do_cli("get", "tahoe:Ärtonwall"))
980 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA1))
982 d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
983 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
# Marked .todo: trial expects this test to fail (see ticket #534).
986 test_unicode_filename.todo = "This behavior is not yet supported, although it does happen to work (for reasons that are ill-understood) on many platforms. See issue ticket #534."
988 def test_dangling_symlink_vs_recursion(self):
989 if not hasattr(os, 'symlink'):
990 raise unittest.SkipTest("There is no symlink on this platform.")
991 # cp -r on a directory containing a dangling symlink shouldn't assert
992 self.basedir = "cli/Cp/dangling_symlink_vs_recursion"
994 dn = os.path.join(self.basedir, "dir")
996 fn = os.path.join(dn, "Fakebandica")
998 ln = os.path.join(dn, "link")
1000 d = self.do_cli("create-alias", "tahoe")
1001 d.addCallback(lambda res: self.do_cli("cp", "--recursive",
# Exercises `tahoe cp` with raw filecaps and dircap/child paths as sources.
1005 def test_copy_using_filecap(self):
1006 self.basedir = "cli/Cp/test_copy_using_filecap"
1008 outdir = os.path.join(self.basedir, "outdir")
1010 self.do_cli("create-alias", "tahoe")
1011 fn1 = os.path.join(self.basedir, "Metallica")
1012 fn2 = os.path.join(outdir, "Not Metallica")
1013 fn3 = os.path.join(outdir, "test2")
1014 DATA1 = "puppies" * 10000
1015 open(fn1, "wb").write(DATA1)
1016 d = self.do_cli("put", fn1)
1017 def _put_file((rc, out, err)):
1018 self.failUnlessEqual(rc, 0)
1019 # keep track of the filecap
1020 self.filecap = out.strip()
1021 d.addCallback(_put_file)
1022 # Let's try copying this to the disk using the filecap
1023 # cp FILECAP filename
1024 d.addCallback(lambda res: self.do_cli("cp", self.filecap, fn2))
1025 def _copy_file((rc, out, err)):
1026 self.failUnlessEqual(rc, 0)
1027 results = open(fn2, "r").read()
1028 self.failUnlessEqual(results, DATA1)
1029 # Test with ./ (see #761)
1030 # cp FILECAP localdir
1031 d.addCallback(lambda res: self.do_cli("cp", self.filecap, outdir))
1032 def _resp((rc, out, err)):
1033 self.failUnlessEqual(rc, 1)
1034 self.failUnlessIn("error: you must specify a destination filename",
1036 d.addCallback(_resp)
1037 # Create a directory, linked at tahoe:test
1038 d.addCallback(lambda res: self.do_cli("mkdir", "tahoe:test"))
1039 def _get_dir((rc, out, err)):
1040 self.failUnlessEqual(rc, 0)
1041 self.dircap = out.strip()
1042 d.addCallback(_get_dir)
1043 # Upload a file to the directory
1044 d.addCallback(lambda res:
1045 self.do_cli("put", fn1, "tahoe:test/test_file"))
1046 d.addCallback(lambda (rc, out, err): self.failUnlessEqual(rc, 0))
1047 # cp DIRCAP/filename localdir
1048 d.addCallback(lambda res:
1049 self.do_cli("cp", self.dircap + "/test_file", outdir))
1050 def _get_resp((rc, out, err)):
1051 self.failUnlessEqual(rc, 0)
1052 results = open(os.path.join(outdir, "test_file"), "r").read()
1053 self.failUnlessEqual(results, DATA1)
1054 d.addCallback(_get_resp)
1055 # cp -r DIRCAP/filename filename2
1056 d.addCallback(lambda res:
1057 self.do_cli("cp", self.dircap + "/test_file", fn3))
1058 def _get_resp2((rc, out, err)):
1059 self.failUnlessEqual(rc, 0)
1060 results = open(fn3, "r").read()
1061 self.failUnlessEqual(results, DATA1)
1062 d.addCallback(_get_resp2)
# Tests for the `tahoe backup` CLI command: incremental backups with a
# backupdb, --exclude option handling, and graceful skipping of symlinks
# and unreadable files/directories.
# NOTE(review): excerpted listing — embedded original line numbers with many
# gaps (1066, 1071-1073, 1087-1088, 1091, 1093-1094, 1101, 1103-1104, 1106,
# 1109, 1111-1112, 1118, ...) show missing source lines (e.g. the f.write/
# f.close of writeto, the do_backup cmd list setup, the `expected_new`
# computations, the `_modify` and `_cleanup` def lines); code kept
# byte-identical.
1065 class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
# Helper: write `data` to basedir/home/<path>, creating parent dirs.
1067 def writeto(self, path, data):
1068 d = os.path.dirname(os.path.join(self.basedir, "home", path))
1069 fileutil.make_dirs(d)
1070 f = open(os.path.join(self.basedir, "home", path), "w")
# Helper: parse the backup summary line into the six counters
# [files uploaded, files reused, files skipped, dirs created, dirs
# reused, dirs skipped] via re.search.
1074 def count_output(self, out):
1075 mo = re.search(r"(\d)+ files uploaded \((\d+) reused\), "
1076 "(\d)+ files skipped, "
1077 "(\d+) directories created \((\d+) reused\), "
1078 "(\d+) directories skipped", out)
1079 return [int(s) for s in mo.groups()]
# Helper: parse the verbose "checked" counts [files checked, dirs checked].
1081 def count_output2(self, out):
1082 mo = re.search(r"(\d)+ files checked, (\d+) directories checked", out)
1083 return [int(s) for s in mo.groups()]
1085 def test_backup(self):
1086 self.basedir = "cli/Backup/backup"
1089 # is the backupdb available? If so, we test that a second backup does
1090 # not create new directories.
1092 have_bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
1095 # create a small local directory with a couple of files
1096 source = os.path.join(self.basedir, "home")
1097 fileutil.make_dirs(os.path.join(source, "empty"))
1098 self.writeto("parent/subdir/foo.txt", "foo")
1099 self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
1100 self.writeto("parent/blah.txt", "blah")
1102 def do_backup(verbose=False):
1105 cmd.append("--verbose")
1107 cmd.append("tahoe:backups")
1108 return self.do_cli(*cmd)
1110 d = self.do_cli("create-alias", "tahoe")
# (no-backupdb branch, presumably gated on `have_bdb` — surrounding
# conditional lines are missing from this excerpt)
1113 d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:backups"))
1114 def _should_complain((rc, out, err)):
1115 self.failUnless("I was unable to import a python sqlite library" in err, err)
1116 d.addCallback(_should_complain)
1117 d.addCallback(self.stall, 1.1) # make sure the backups get distinct timestamps
1119 d.addCallback(lambda res: do_backup())
# First backup: everything is new — 3 files uploaded, 4 dirs created.
1120 def _check0((rc, out, err)):
1121 self.failUnlessEqual(err, "")
1122 self.failUnlessEqual(rc, 0)
1123 fu, fr, fs, dc, dr, ds = self.count_output(out)
1124 # foo.txt, bar.txt, blah.txt
1125 self.failUnlessEqual(fu, 3)
1126 self.failUnlessEqual(fr, 0)
1127 self.failUnlessEqual(fs, 0)
1128 # empty, home, home/parent, home/parent/subdir
1129 self.failUnlessEqual(dc, 4)
1130 self.failUnlessEqual(dr, 0)
1131 self.failUnlessEqual(ds, 0)
1132 d.addCallback(_check0)
1134 d.addCallback(lambda res: self.do_cli("ls", "--uri", "tahoe:backups"))
# The backup root should contain exactly "Archives" and "Latest", with
# Latest pointing at an immutable (CHK) directory.
1135 def _check1((rc, out, err)):
1136 self.failUnlessEqual(err, "")
1137 self.failUnlessEqual(rc, 0)
1138 lines = out.split("\n")
1139 children = dict([line.split() for line in lines if line])
1140 latest_uri = children["Latest"]
1141 self.failUnless(latest_uri.startswith("URI:DIR2-CHK:"), latest_uri)
1142 childnames = children.keys()
1143 self.failUnlessEqual(sorted(childnames), ["Archives", "Latest"])
1144 d.addCallback(_check1)
1145 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
1146 def _check2((rc, out, err)):
1147 self.failUnlessEqual(err, "")
1148 self.failUnlessEqual(rc, 0)
1149 self.failUnlessEqual(sorted(out.split()), ["empty", "parent"])
1150 d.addCallback(_check2)
1151 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
1152 def _check2a((rc, out, err)):
1153 self.failUnlessEqual(err, "")
1154 self.failUnlessEqual(rc, 0)
1155 self.failUnlessEqual(out.strip(), "")
1156 d.addCallback(_check2a)
1157 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
1158 def _check3((rc, out, err)):
1159 self.failUnlessEqual(err, "")
1160 self.failUnlessEqual(rc, 0)
1161 self.failUnlessEqual(out, "foo")
1162 d.addCallback(_check3)
1163 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
1164 def _check4((rc, out, err)):
1165 self.failUnlessEqual(err, "")
1166 self.failUnlessEqual(rc, 0)
1167 self.old_archives = out.split()
1168 self.failUnlessEqual(len(self.old_archives), 1)
1169 d.addCallback(_check4)
1172 d.addCallback(self.stall, 1.1)
1173 d.addCallback(lambda res: do_backup())
# Second backup with unchanged source: with a backupdb everything is
# reused (0 uploads / 0 creates, 3 file-reuses / 4 dir-reuses).
1174 def _check4a((rc, out, err)):
1175 # second backup should reuse everything, if the backupdb is
1177 self.failUnlessEqual(err, "")
1178 self.failUnlessEqual(rc, 0)
1180 fu, fr, fs, dc, dr, ds = self.count_output(out)
1181 # foo.txt, bar.txt, blah.txt
1182 self.failUnlessEqual(fu, 0)
1183 self.failUnlessEqual(fr, 3)
1184 self.failUnlessEqual(fs, 0)
1185 # empty, home, home/parent, home/parent/subdir
1186 self.failUnlessEqual(dc, 0)
1187 self.failUnlessEqual(dr, 4)
1188 self.failUnlessEqual(ds, 0)
1189 d.addCallback(_check4a)
1192 # sneak into the backupdb, crank back the "last checked"
1193 # timestamp to force a check on all files
1194 def _reset_last_checked(res):
1195 dbfile = os.path.join(self.get_clientdir(),
1196 "private", "backupdb.sqlite")
1197 self.failUnless(os.path.exists(dbfile), dbfile)
1198 bdb = backupdb.get_backupdb(dbfile)
1199 bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
1200 bdb.cursor.execute("UPDATE directories SET last_checked=0")
1201 bdb.connection.commit()
1203 d.addCallback(_reset_last_checked)
1205 d.addCallback(self.stall, 1.1)
1206 d.addCallback(lambda res: do_backup(verbose=True))
1207 def _check4b((rc, out, err)):
1208 # we should check all files, and re-use all of them. None of
1209 # the directories should have been changed, so we should
1210 # re-use all of them too.
1211 self.failUnlessEqual(err, "")
1212 self.failUnlessEqual(rc, 0)
1213 fu, fr, fs, dc, dr, ds = self.count_output(out)
1214 fchecked, dchecked = self.count_output2(out)
1215 self.failUnlessEqual(fchecked, 3)
1216 self.failUnlessEqual(fu, 0)
1217 self.failUnlessEqual(fr, 3)
1218 self.failUnlessEqual(fs, 0)
1219 self.failUnlessEqual(dchecked, 4)
1220 self.failUnlessEqual(dc, 0)
1221 self.failUnlessEqual(dr, 4)
1222 self.failUnlessEqual(ds, 0)
1223 d.addCallback(_check4b)
1225 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
1226 def _check5((rc, out, err)):
1227 self.failUnlessEqual(err, "")
1228 self.failUnlessEqual(rc, 0)
1229 self.new_archives = out.split()
# (`expected_new` is computed on missing lines 1230-1232 — presumably
# from have_bdb; TODO confirm against the full file)
1233 self.failUnlessEqual(len(self.new_archives), expected_new, out)
1234 # the original backup should still be the oldest (i.e. sorts
1235 # alphabetically towards the beginning)
1236 self.failUnlessEqual(sorted(self.new_archives)[0],
1237 self.old_archives[0])
1238 d.addCallback(_check5)
1240 d.addCallback(self.stall, 1.1)
# Mutate the source tree (def _modify line is missing from this excerpt):
# change a file, swap a file for a directory and vice versa.
1242 self.writeto("parent/subdir/foo.txt", "FOOF!")
1243 # and turn a file into a directory
1244 os.unlink(os.path.join(source, "parent/blah.txt"))
1245 os.mkdir(os.path.join(source, "parent/blah.txt"))
1246 self.writeto("parent/blah.txt/surprise file", "surprise")
1247 self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
1248 # turn a directory into a file
1249 os.rmdir(os.path.join(source, "empty"))
1250 self.writeto("empty", "imagine nothing being here")
1252 d.addCallback(_modify)
1253 def _check5a((rc, out, err)):
1254 # second backup should reuse bar.txt (if backupdb is available),
1255 # and upload the rest. None of the directories can be reused.
1256 self.failUnlessEqual(err, "")
1257 self.failUnlessEqual(rc, 0)
1259 fu, fr, fs, dc, dr, ds = self.count_output(out)
1260 # new foo.txt, surprise file, subfile, empty
1261 self.failUnlessEqual(fu, 4)
1263 self.failUnlessEqual(fr, 1)
1264 self.failUnlessEqual(fs, 0)
1265 # home, parent, subdir, blah.txt, surprisedir
1266 self.failUnlessEqual(dc, 5)
1267 self.failUnlessEqual(dr, 0)
1268 self.failUnlessEqual(ds, 0)
1269 d.addCallback(_check5a)
1270 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
1271 def _check6((rc, out, err)):
1272 self.failUnlessEqual(err, "")
1273 self.failUnlessEqual(rc, 0)
1274 self.new_archives = out.split()
1278 self.failUnlessEqual(len(self.new_archives), expected_new)
1279 self.failUnlessEqual(sorted(self.new_archives)[0],
1280 self.old_archives[0])
1281 d.addCallback(_check6)
1282 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
1283 def _check7((rc, out, err)):
1284 self.failUnlessEqual(err, "")
1285 self.failUnlessEqual(rc, 0)
1286 self.failUnlessEqual(out, "FOOF!")
1287 # the old snapshot should not be modified
1288 return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
1289 d.addCallback(_check7)
1290 def _check8((rc, out, err)):
1291 self.failUnlessEqual(err, "")
1292 self.failUnlessEqual(rc, 0)
1293 self.failUnlessEqual(out, "foo")
1294 d.addCallback(_check8)
1298 # on our old dapper buildslave, this test takes a long time (usually
1299 # 130s), so we have to bump up the default 120s timeout. The create-alias
1300 # and initial backup alone take 60s, probably because of the handful of
1301 # dirnodes being created (RSA key generation). The backup between check4
1302 # and check4a takes 6s, as does the backup before check4b.
1303 test_backup.timeout = 3000
# Pure option-parsing test: --exclude / --exclude-vcs / --exclude-from
# drive BackupOptions.filter_listdir; no grid interaction needed.
1305 def test_exclude_options(self):
1306 root_listdir = ('lib.a', '_darcs', 'subdir', 'nice_doc.lyx')
1307 subdir_listdir = ('another_doc.lyx', 'run_snake_run.py', 'CVS', '.svn', '_darcs')
1308 basedir = "cli/Backup/exclude_options"
1309 fileutil.make_dirs(basedir)
1310 nodeurl_path = os.path.join(basedir, 'node.url')
1311 nodeurl = file(nodeurl_path, 'w')
1312 nodeurl.write('http://example.net:2357/')
1315 def _check_filtering(filtered, all, included, excluded):
1316 filtered = set(filtered)
1318 included = set(included)
1319 excluded = set(excluded)
1320 self.failUnlessEqual(filtered, included)
1321 self.failUnlessEqual(all.difference(filtered), excluded)
1323 # test simple exclude
1324 backup_options = cli.BackupOptions()
1325 backup_options.parseOptions(['--exclude', '*lyx', '--node-directory',
1326 basedir, 'from', 'to'])
1327 filtered = list(backup_options.filter_listdir(root_listdir))
1328 _check_filtering(filtered, root_listdir, ('lib.a', '_darcs', 'subdir'),
1331 backup_options = cli.BackupOptions()
1332 backup_options.parseOptions(['--exclude', '*lyx', '--exclude', 'lib.?', '--node-directory',
1333 basedir, 'from', 'to'])
1334 filtered = list(backup_options.filter_listdir(root_listdir))
1335 _check_filtering(filtered, root_listdir, ('_darcs', 'subdir'),
1336 ('nice_doc.lyx', 'lib.a'))
1337 # vcs metadata exclusion
1338 backup_options = cli.BackupOptions()
1339 backup_options.parseOptions(['--exclude-vcs', '--node-directory',
1340 basedir, 'from', 'to'])
1341 filtered = list(backup_options.filter_listdir(subdir_listdir))
1342 _check_filtering(filtered, subdir_listdir, ('another_doc.lyx', 'run_snake_run.py',),
1343 ('CVS', '.svn', '_darcs'))
1344 # read exclude patterns from file
1345 exclusion_string = "_darcs\n*py\n.svn"
1346 excl_filepath = os.path.join(basedir, 'exclusion')
1347 excl_file = file(excl_filepath, 'w')
1348 excl_file.write(exclusion_string)
1350 backup_options = cli.BackupOptions()
1351 backup_options.parseOptions(['--exclude-from', excl_filepath, '--node-directory',
1352 basedir, 'from', 'to'])
1353 filtered = list(backup_options.filter_listdir(subdir_listdir))
1354 _check_filtering(filtered, subdir_listdir, ('another_doc.lyx', 'CVS'),
1355 ('.svn', '_darcs', 'run_snake_run.py'))
1356 # text BackupConfigurationError
1357 self.failUnlessRaises(cli.BackupConfigurationError,
1358 backup_options.parseOptions,
1359 ['--exclude-from', excl_filepath + '.no', '--node-directory',
1360 basedir, 'from', 'to'])
1362 # test that an iterator works too
1363 backup_options = cli.BackupOptions()
1364 backup_options.parseOptions(['--exclude', '*lyx', '--node-directory',
1365 basedir, 'from', 'to'])
1366 filtered = list(backup_options.filter_listdir(iter(root_listdir)))
1367 _check_filtering(filtered, root_listdir, ('lib.a', '_darcs', 'subdir'),
# A symlink in the source tree should be skipped with a warning (rc 2),
# not backed up.
1370 def test_ignore_symlinks(self):
1371 if not hasattr(os, 'symlink'):
1372 raise unittest.SkipTest("There is no symlink on this platform.")
1374 self.basedir = os.path.dirname(self.mktemp())
1377 source = os.path.join(self.basedir, "home")
1378 self.writeto("foo.txt", "foo")
1379 os.symlink(os.path.join(source, "foo.txt"), os.path.join(source, "foo2.txt"))
1381 d = self.do_cli("create-alias", "tahoe")
1382 d.addCallback(lambda res: self.do_cli("backup", "--verbose", source, "tahoe:test"))
1384 def _check((rc, out, err)):
1385 self.failUnlessEqual(rc, 2)
1386 self.failUnlessEqual(err, "WARNING: cannot backup special file %s\n" % os.path.join(source, "foo2.txt"))
1388 fu, fr, fs, dc, dr, ds = self.count_output(out)
1390 self.failUnlessEqual(fu, 1)
1391 self.failUnlessEqual(fr, 0)
1393 self.failUnlessEqual(fs, 1)
1395 self.failUnlessEqual(dc, 1)
1396 self.failUnlessEqual(dr, 0)
1397 self.failUnlessEqual(ds, 0)
1399 d.addCallback(_check)
# A chmod-0000 file should be skipped with a permission warning (rc 2).
1402 def test_ignore_unreadable_file(self):
1403 self.basedir = os.path.dirname(self.mktemp())
1406 source = os.path.join(self.basedir, "home")
1407 self.writeto("foo.txt", "foo")
1408 os.chmod(os.path.join(source, "foo.txt"), 0000)
1410 d = self.do_cli("create-alias", "tahoe")
1411 d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test"))
1413 def _check((rc, out, err)):
1414 self.failUnlessEqual(rc, 2)
1415 self.failUnlessEqual(err, "WARNING: permission denied on file %s\n" % os.path.join(source, "foo.txt"))
1417 fu, fr, fs, dc, dr, ds = self.count_output(out)
1418 self.failUnlessEqual(fu, 0)
1419 self.failUnlessEqual(fr, 0)
1421 self.failUnlessEqual(fs, 1)
1423 self.failUnlessEqual(dc, 1)
1424 self.failUnlessEqual(dr, 0)
1425 self.failUnlessEqual(ds, 0)
1426 d.addCallback(_check)
1428 # This is necessary for the temp files to be correctly removed
1430 os.chmod(os.path.join(source, "foo.txt"), 0644)
1431 d.addCallback(_cleanup)
1432 d.addErrback(_cleanup)
# A chmod-0000 directory should likewise be skipped with a warning (rc 2).
1436 def test_ignore_unreadable_directory(self):
1437 self.basedir = os.path.dirname(self.mktemp())
1440 source = os.path.join(self.basedir, "home")
1442 os.mkdir(os.path.join(source, "test"))
1443 os.chmod(os.path.join(source, "test"), 0000)
1445 d = self.do_cli("create-alias", "tahoe")
1446 d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test"))
1448 def _check((rc, out, err)):
1449 self.failUnlessEqual(rc, 2)
1450 self.failUnlessEqual(err, "WARNING: permission denied on directory %s\n" % os.path.join(source, "test"))
1452 fu, fr, fs, dc, dr, ds = self.count_output(out)
1453 self.failUnlessEqual(fu, 0)
1454 self.failUnlessEqual(fr, 0)
1455 self.failUnlessEqual(fs, 0)
1457 self.failUnlessEqual(dc, 2)
1458 self.failUnlessEqual(dr, 0)
1460 self.failUnlessEqual(ds, 1)
1461 d.addCallback(_check)
1463 # This is necessary for the temp files to be correctly removed
1465 os.chmod(os.path.join(source, "test"), 0655)
1466 d.addCallback(_cleanup)
1467 d.addErrback(_cleanup)
# Tests for `tahoe check` / `tahoe deep-check` / `tahoe manifest` /
# `tahoe stats`: healthy output, behavior after deleting and corrupting
# shares, --verify/--repair flows, and error handling for unrecoverable
# directories.
# NOTE(review): excerpted listing — embedded original line numbers with gaps
# (1472, 1475, 1477, 1479, 1512, 1557, 1559-1561, 1564, ...) show missing
# source lines (e.g. the `def _stash_uri(n):` header near 1479, the shnum
# tuple element near 1512/1637, the deep-check targets after 1663/1684/1696);
# code kept byte-identical.
1471 class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
1473 def test_check(self):
1474 self.basedir = "cli/Check/check"
1476 c0 = self.g.clients[0]
1478 d = c0.create_mutable_file(DATA)
1480 self.uri = n.get_uri()
1481 d.addCallback(_stash_uri)
# Healthy file: `check` reports Summary: Healthy with all 10 shares.
1483 d.addCallback(lambda ign: self.do_cli("check", self.uri))
1484 def _check1((rc, out, err)):
1485 self.failUnlessEqual(err, "")
1486 self.failUnlessEqual(rc, 0)
1487 lines = out.splitlines()
1488 self.failUnless("Summary: Healthy" in lines, out)
1489 self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
1490 d.addCallback(_check1)
# --raw emits machine-readable JSON with the same summary.
1492 d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
1493 def _check2((rc, out, err)):
1494 self.failUnlessEqual(err, "")
1495 self.failUnlessEqual(rc, 0)
1496 data = simplejson.loads(out)
1497 self.failUnlessEqual(data["summary"], "Healthy")
1498 d.addCallback(_check2)
1500 def _clobber_shares(ignored):
1501 # delete one, corrupt a second
1502 shares = self.find_shares(self.uri)
1503 self.failUnlessEqual(len(shares), 10)
1504 os.unlink(shares[0][2])
1505 cso = debug.CorruptShareOptions()
1506 cso.stdout = StringIO()
1507 cso.parseOptions([shares[1][2]])
1508 storage_index = uri.from_string(self.uri).get_storage_index()
1509 self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
1510 (base32.b2a(shares[1][1]),
1511 base32.b2a(storage_index),
1513 debug.corrupt_share(cso)
1514 d.addCallback(_clobber_shares)
# After clobbering: --verify sees 8 good shares and the corrupt one.
1516 d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
1517 def _check3((rc, out, err)):
1518 self.failUnlessEqual(err, "")
1519 self.failUnlessEqual(rc, 0)
1520 lines = out.splitlines()
1521 summary = [l for l in lines if l.startswith("Summary")][0]
1522 self.failUnless("Summary: Unhealthy: 8 shares (enc 3-of-10)"
1523 in summary, summary)
1524 self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
1525 self.failUnless(" corrupt shares:" in lines, out)
1526 self.failUnless(self._corrupt_share_line in lines, out)
1527 d.addCallback(_check3)
# --verify --repair reports the damage and a successful repair...
1529 d.addCallback(lambda ign:
1530 self.do_cli("check", "--verify", "--repair", self.uri))
1531 def _check4((rc, out, err)):
1532 self.failUnlessEqual(err, "")
1533 self.failUnlessEqual(rc, 0)
1534 lines = out.splitlines()
1535 self.failUnless("Summary: not healthy" in lines, out)
1536 self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
1537 self.failUnless(" corrupt shares:" in lines, out)
1538 self.failUnless(self._corrupt_share_line in lines, out)
1539 self.failUnless(" repair successful" in lines, out)
1540 d.addCallback(_check4)
# ...and a second repair pass finds the file healthy again.
1542 d.addCallback(lambda ign:
1543 self.do_cli("check", "--verify", "--repair", self.uri))
1544 def _check5((rc, out, err)):
1545 self.failUnlessEqual(err, "")
1546 self.failUnlessEqual(rc, 0)
1547 lines = out.splitlines()
1548 self.failUnless("Summary: healthy" in lines, out)
1549 self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
1550 self.failIf(" corrupt shares:" in lines, out)
1551 d.addCallback(_check5)
1555 def test_deep_check(self):
1556 self.basedir = "cli/Check/deep_check"
1558 c0 = self.g.clients[0]
# Build root dir with an immutable file ("good"), a literal file
# ("small"), and a mutable file ("mutable"); stash each cap in self.uris.
1562 d = c0.create_dirnode()
1563 def _stash_root_and_create_file(n):
1565 self.rooturi = n.get_uri()
1566 return n.add_file(u"good", upload.Data(DATA, convergence=""))
1567 d.addCallback(_stash_root_and_create_file)
1568 def _stash_uri(fn, which):
1569 self.uris[which] = fn.get_uri()
1571 d.addCallback(_stash_uri, "good")
1572 d.addCallback(lambda ign:
1573 self.rootnode.add_file(u"small",
1574 upload.Data("literal",
1576 d.addCallback(_stash_uri, "small")
1577 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"1"))
1578 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
1579 d.addCallback(_stash_uri, "mutable")
# deep-check of a fully healthy tree: 4 objects, all healthy.
1581 d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
1582 def _check1((rc, out, err)):
1583 self.failUnlessEqual(err, "")
1584 self.failUnlessEqual(rc, 0)
1585 lines = out.splitlines()
1586 self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
1588 d.addCallback(_check1)
1595 d.addCallback(lambda ign: self.do_cli("deep-check", "--verbose",
1597 def _check2((rc, out, err)):
1598 self.failUnlessEqual(err, "")
1599 self.failUnlessEqual(rc, 0)
1600 lines = out.splitlines()
1601 self.failUnless("<root>: Healthy" in lines, out)
1602 self.failUnless("small: Healthy (LIT)" in lines, out)
1603 self.failUnless("good: Healthy" in lines, out)
1604 self.failUnless("mutable: Healthy" in lines, out)
1605 self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
1607 d.addCallback(_check2)
# `stats` should count one of each file type and print the histogram.
1609 d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
1610 def _check_stats((rc, out, err)):
1611 self.failUnlessEqual(err, "")
1612 self.failUnlessEqual(rc, 0)
1613 lines = out.splitlines()
1614 self.failUnlessIn(" count-immutable-files: 1", lines)
1615 self.failUnlessIn(" count-mutable-files: 1", lines)
1616 self.failUnlessIn(" count-literal-files: 1", lines)
1617 self.failUnlessIn(" count-directories: 1", lines)
1618 self.failUnlessIn(" size-immutable-files: 400", lines)
1619 self.failUnlessIn("Size Histogram:", lines)
1620 self.failUnlessIn(" 4-10 : 1 (10 B, 10 B)", lines)
1621 self.failUnlessIn(" 317-1000 : 1 (1000 B, 1000 B)", lines)
1622 d.addCallback(_check_stats)
# Damage the tree: drop one share of "good", corrupt one of "mutable".
1624 def _clobber_shares(ignored):
1625 shares = self.find_shares(self.uris["good"])
1626 self.failUnlessEqual(len(shares), 10)
1627 os.unlink(shares[0][2])
1629 shares = self.find_shares(self.uris["mutable"])
1630 cso = debug.CorruptShareOptions()
1631 cso.stdout = StringIO()
1632 cso.parseOptions([shares[1][2]])
1633 storage_index = uri.from_string(self.uris["mutable"]).get_storage_index()
1634 self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
1635 (base32.b2a(shares[1][1]),
1636 base32.b2a(storage_index),
1638 debug.corrupt_share(cso)
1639 d.addCallback(_clobber_shares)
1642 # root/good [9 shares]
1644 # root/mutable [1 corrupt share]
# Without --verify, only the missing share is detected; the corruption
# in the mutable file goes unnoticed.
1646 d.addCallback(lambda ign:
1647 self.do_cli("deep-check", "--verbose", self.rooturi))
1648 def _check3((rc, out, err)):
1649 self.failUnlessEqual(err, "")
1650 self.failUnlessEqual(rc, 0)
1651 lines = out.splitlines()
1652 self.failUnless("<root>: Healthy" in lines, out)
1653 self.failUnless("small: Healthy (LIT)" in lines, out)
1654 self.failUnless("mutable: Healthy" in lines, out) # needs verifier
1655 self.failUnless("good: Not Healthy: 9 shares (enc 3-of-10)"
1657 self.failIf(self._corrupt_share_line in lines, out)
1658 self.failUnless("done: 4 objects checked, 3 healthy, 1 unhealthy"
1660 d.addCallback(_check3)
# With --verify the corrupt mutable share is detected as well.
1662 d.addCallback(lambda ign:
1663 self.do_cli("deep-check", "--verbose", "--verify",
1665 def _check4((rc, out, err)):
1666 self.failUnlessEqual(err, "")
1667 self.failUnlessEqual(rc, 0)
1668 lines = out.splitlines()
1669 self.failUnless("<root>: Healthy" in lines, out)
1670 self.failUnless("small: Healthy (LIT)" in lines, out)
1671 mutable = [l for l in lines if l.startswith("mutable")][0]
1672 self.failUnless(mutable.startswith("mutable: Unhealthy: 9 shares (enc 3-of-10)"),
1674 self.failUnless(self._corrupt_share_line in lines, out)
1675 self.failUnless("good: Not Healthy: 9 shares (enc 3-of-10)"
1677 self.failUnless("done: 4 objects checked, 2 healthy, 2 unhealthy"
1679 d.addCallback(_check4)
# --raw deep-check: one JSON unit per object plus a stats unit.
1681 d.addCallback(lambda ign:
1682 self.do_cli("deep-check", "--raw",
1684 def _check5((rc, out, err)):
1685 self.failUnlessEqual(err, "")
1686 self.failUnlessEqual(rc, 0)
1687 lines = out.splitlines()
1688 units = [simplejson.loads(line) for line in lines]
1689 # root, small, good, mutable, stats
1690 self.failUnlessEqual(len(units), 4+1)
1691 d.addCallback(_check5)
# --verify --repair: both damaged objects are repaired successfully.
1693 d.addCallback(lambda ign:
1694 self.do_cli("deep-check",
1695 "--verbose", "--verify", "--repair",
1697 def _check6((rc, out, err)):
1698 self.failUnlessEqual(err, "")
1699 self.failUnlessEqual(rc, 0)
1700 lines = out.splitlines()
1701 self.failUnless("<root>: healthy" in lines, out)
1702 self.failUnless("small: healthy" in lines, out)
1703 self.failUnless("mutable: not healthy" in lines, out)
1704 self.failUnless(self._corrupt_share_line in lines, out)
1705 self.failUnless("good: not healthy" in lines, out)
1706 self.failUnless("done: 4 objects checked" in lines, out)
1707 self.failUnless(" pre-repair: 2 healthy, 2 unhealthy" in lines, out)
1708 self.failUnless(" 2 repairs attempted, 2 successful, 0 failed"
1710 self.failUnless(" post-repair: 4 healthy, 0 unhealthy" in lines,out)
1711 d.addCallback(_check6)
1713 # now add a subdir, and a file below that, then make the subdir
1716 d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"subdir"))
1717 d.addCallback(_stash_uri, "subdir")
1718 d.addCallback(lambda fn:
1719 fn.add_file(u"subfile", upload.Data(DATA+"2", "")))
1720 d.addCallback(lambda ign:
1721 self.delete_shares_numbered(self.uris["subdir"],
1728 # root/subdir [unrecoverable: 0 shares]
# manifest over a tree containing an unrecoverable directory must fail,
# but still print the fatal directory as the last output line.
1731 d.addCallback(lambda ign: self.do_cli("manifest", self.rooturi))
1732 def _manifest_failed((rc, out, err)):
1733 self.failIfEqual(rc, 0)
1734 self.failUnlessIn("ERROR: UnrecoverableFileError", err)
1735 # the fatal directory should still show up, as the last line
1736 self.failUnlessIn(" subdir\n", out)
1737 d.addCallback(_manifest_failed)
1739 d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
1740 def _deep_check_failed((rc, out, err)):
1741 self.failIfEqual(rc, 0)
1742 self.failUnlessIn("ERROR: UnrecoverableFileError", err)
1743 # we want to make sure that the error indication is the last
1744 # thing that gets emitted
1745 self.failIf("done:" in out, out)
1746 d.addCallback(_deep_check_failed)
1748 # this test is disabled until the deep-repair response to an
1749 # unrepairable directory is fixed. The failure-to-repair should not
1750 # throw an exception, but the failure-to-traverse that follows
1751 # should throw UnrecoverableFileError.
1753 #d.addCallback(lambda ign:
1754 # self.do_cli("deep-check", "--repair", self.rooturi))
1755 #def _deep_check_repair_failed((rc, out, err)):
1756 # self.failIfEqual(rc, 0)
1758 # self.failUnlessIn("ERROR: UnrecoverableFileError", err)
1759 # self.failIf("done:" in out, out)
1760 #d.addCallback(_deep_check_repair_failed)
1764 class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
# NOTE(review): this chunk is an elided, line-numbered listing.  Several
# original lines are missing between the numbered lines below -- at least the
# "def test_get(self):" method header, the set_up_grid()/DATA setup, and the
# "def _stash_bad(ur):" header for the callback whose body starts at 1773.
# Do not treat the lines below as contiguous, runnable code.
#
# Purpose (as far as the visible lines show): upload an immutable file, delete
# shares 1..9 so only one share remains, then check that "tahoe get" fails
# loudly -- nonzero exit code, "410 Gone" plus NotEnoughSharesError details on
# stderr -- both when writing to stdout and to a target file (which must not
# be created on failure).
1766 self.basedir = "cli/Errors/get"
1768 c0 = self.g.clients[0]
1771 d = c0.upload(upload.Data(DATA, convergence=""))
# presumably inside "def _stash_bad(ur):" (header elided above) -- keep the
# upload's URI, then delete all shares except share 0 so the file becomes
# unrecoverable (need 3 shares, have 1).
1773 self.uri_1share = ur.uri
1774 self.delete_shares_numbered(ur.uri, range(1,10))
1775 d.addCallback(_stash_bad)
# First attempt: "tahoe get URI" with no target file (output would go to
# stdout).  The CLI must fail and explain why on stderr.
1777 d.addCallback(lambda ign: self.do_cli("get", self.uri_1share))
1778 def _check1((rc, out, err)):
1779 self.failIfEqual(rc, 0)
1780 self.failUnless("410 Gone" in err, err)
1781 self.failUnlessIn("NotEnoughSharesError: ", err)
1782 self.failUnlessIn("Failed to get enough shareholders: have 1, need 3", err)
1783 d.addCallback(_check1)
# Second attempt: same failing "tahoe get", but with an explicit output file.
# In addition to the stderr diagnostics, the target file must NOT be left
# behind after the failed download.
1785 targetf = os.path.join(self.basedir, "output")
1786 d.addCallback(lambda ign: self.do_cli("get", self.uri_1share, targetf))
1787 def _check2((rc, out, err)):
1788 self.failIfEqual(rc, 0)
1789 self.failUnless("410 Gone" in err, err)
1790 self.failUnlessIn("NotEnoughSharesError: ", err)
1791 self.failUnlessIn("Failed to get enough shareholders: have 1, need 3", err)
1792 self.failIf(os.path.exists(targetf))
1793 d.addCallback(_check2)
# NOTE(review): the method presumably ends with "return d" (Trial runs the
# Deferred chain) -- that line is elided from this listing; confirm against
# the full file.
1797 class Stats(GridTestMixin, CLITestMixin, unittest.TestCase):
# Checks that "tahoe stats" works on a deep-stats edge case: a directory tree
# consisting of a single empty directory.  All file counters must be zero,
# exactly one directory must be counted, and the size histogram section must
# be omitted (there are no immutable files to histogram).
1798 def test_empty_directory(self):
1799 self.basedir = "cli/Stats/empty_directory"
# NOTE(review): this is an elided listing -- the set_up_grid() call (around
# original line 1800) and the "def _stash_root(n):" header for the callback
# whose body starts at 1806 are missing between the numbered lines.
1801 c0 = self.g.clients[0]
1803 d = c0.create_dirnode()
# presumably inside "def _stash_root(n):" (header elided) -- remember the new
# empty directory's URI so the CLI call below can target it.
1806 self.rooturi = n.get_uri()
1807 d.addCallback(_stash_root)
1809 # make sure we can get stats on an empty directory too
1810 d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
# The stats output is line-oriented ("  key: value"); assert on whole lines so
# a count of e.g. 10 cannot satisfy a check for 1.
1811 def _check_stats((rc, out, err)):
1812 self.failUnlessEqual(err, "")
1813 self.failUnlessEqual(rc, 0)
1814 lines = out.splitlines()
1815 self.failUnlessIn(" count-immutable-files: 0", lines)
1816 self.failUnlessIn(" count-mutable-files: 0", lines)
1817 self.failUnlessIn(" count-literal-files: 0", lines)
1818 self.failUnlessIn(" count-directories: 1", lines)
1819 self.failUnlessIn(" size-immutable-files: 0", lines)
# With zero immutable files there is nothing to bucket, so the histogram
# header must not appear at all.
1820 self.failIfIn("Size Histogram:", lines)
1821 d.addCallback(_check_stats)
# NOTE(review): the trailing "return d" is elided from this listing; the
# method must return the Deferred for Trial to wait on it -- confirm against
# the full file.