4 from twisted.trial import unittest
5 from cStringIO import StringIO
10 from allmydata.util import fileutil, hashutil, base32
11 from allmydata import uri
12 from allmydata.immutable import upload
14 # Test that the scripts can be imported -- although the actual tests of their functionality are
15 # done by invoking them in a subprocess.
16 from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp
17 _hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp]
19 from allmydata.scripts import common
20 from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases, get_alias, \
23 from allmydata.scripts import cli, debug, runner, backupdb
24 from allmydata.test.common_util import StallMixin
25 from allmydata.test.no_network import GridTestMixin
26 from twisted.internet import threads # CLI tests use deferToThread
27 from twisted.python import usage
29 class CLI(unittest.TestCase):
30 # this test case only looks at argument-processing and simple stuff.
31 def test_options(self):
32 fileutil.rm_dir("cli/test_options")
33 fileutil.make_dirs("cli/test_options")
34 fileutil.make_dirs("cli/test_options/private")
35 open("cli/test_options/node.url","w").write("http://localhost:8080/\n")
36 filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16,
37 fingerprint="\x00"*32)
38 private_uri = uri.NewDirectoryURI(filenode_uri).to_string()
39 open("cli/test_options/private/root_dir.cap", "w").write(private_uri + "\n")
41 o.parseOptions(["--node-directory", "cli/test_options"])
42 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
43 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
44 self.failUnlessEqual(o.where, "")
47 o.parseOptions(["--node-directory", "cli/test_options",
48 "--node-url", "http://example.org:8111/"])
49 self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
50 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
51 self.failUnlessEqual(o.where, "")
54 o.parseOptions(["--node-directory", "cli/test_options",
56 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
57 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
58 self.failUnlessEqual(o.where, "")
61 other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
62 fingerprint="\x11"*32)
63 other_uri = uri.NewDirectoryURI(other_filenode_uri).to_string()
64 o.parseOptions(["--node-directory", "cli/test_options",
65 "--dir-cap", other_uri])
66 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
67 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
68 self.failUnlessEqual(o.where, "")
71 o.parseOptions(["--node-directory", "cli/test_options",
72 "--dir-cap", other_uri, "subdir"])
73 self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
74 self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
75 self.failUnlessEqual(o.where, "subdir")
78 self.failUnlessRaises(usage.UsageError,
80 ["--node-directory", "cli/test_options",
81 "--node-url", "NOT-A-URL"])
84 o.parseOptions(["--node-directory", "cli/test_options",
85 "--node-url", "http://localhost:8080"])
86 self.failUnlessEqual(o["node-url"], "http://localhost:8080/")
88 def _dump_cap(self, *args):
89 config = debug.DumpCapOptions()
90 config.stdout,config.stderr = StringIO(), StringIO()
91 config.parseOptions(args)
92 debug.dump_cap(config)
93 self.failIf(config.stderr.getvalue())
94 output = config.stdout.getvalue()
97 def test_dump_cap_chk(self):
98 key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
99 storage_index = hashutil.storage_index_hash(key)
100 uri_extension_hash = hashutil.uri_extension_hash("stuff")
104 u = uri.CHKFileURI(key=key,
105 uri_extension_hash=uri_extension_hash,
106 needed_shares=needed_shares,
107 total_shares=total_shares,
109 output = self._dump_cap(u.to_string())
110 self.failUnless("CHK File:" in output, output)
111 self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
112 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
113 self.failUnless("size: 1234" in output, output)
114 self.failUnless("k/N: 25/100" in output, output)
115 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
117 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
119 self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output)
121 output = self._dump_cap(u.get_verify_cap().to_string())
122 self.failIf("key: " in output, output)
123 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
124 self.failUnless("size: 1234" in output, output)
125 self.failUnless("k/N: 25/100" in output, output)
126 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
128 prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string())
129 output = self._dump_cap(prefixed_u)
130 self.failUnless("CHK File:" in output, output)
131 self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
132 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
133 self.failUnless("size: 1234" in output, output)
134 self.failUnless("k/N: 25/100" in output, output)
135 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
137 def test_dump_cap_lit(self):
138 u = uri.LiteralFileURI("this is some data")
139 output = self._dump_cap(u.to_string())
140 self.failUnless("Literal File URI:" in output, output)
141 self.failUnless("data: this is some data" in output, output)
143 def test_dump_cap_ssk(self):
144 writekey = "\x01" * 16
145 fingerprint = "\xfe" * 32
146 u = uri.WriteableSSKFileURI(writekey, fingerprint)
148 output = self._dump_cap(u.to_string())
149 self.failUnless("SSK Writeable URI:" in output, output)
150 self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output, output)
151 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
152 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
153 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
155 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
157 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
159 fileutil.make_dirs("cli/test_dump_cap/private")
160 f = open("cli/test_dump_cap/private/secret", "w")
161 f.write("5s33nk3qpvnj2fw3z4mnm2y6fa\n")
163 output = self._dump_cap("--client-dir", "cli/test_dump_cap",
165 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
167 output = self._dump_cap("--client-dir", "cli/test_dump_cap_BOGUS",
169 self.failIf("file renewal secret:" in output, output)
171 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
173 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
174 self.failIf("file renewal secret:" in output, output)
176 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
177 "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
179 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
180 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
181 self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
184 output = self._dump_cap(u.to_string())
185 self.failUnless("SSK Read-only URI:" in output, output)
186 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
187 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
188 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
190 u = u.get_verify_cap()
191 output = self._dump_cap(u.to_string())
192 self.failUnless("SSK Verifier URI:" in output, output)
193 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
194 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
196 def test_dump_cap_directory(self):
197 writekey = "\x01" * 16
198 fingerprint = "\xfe" * 32
199 u1 = uri.WriteableSSKFileURI(writekey, fingerprint)
200 u = uri.NewDirectoryURI(u1)
202 output = self._dump_cap(u.to_string())
203 self.failUnless("Directory Writeable URI:" in output, output)
204 self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output,
206 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
207 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output,
209 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
211 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
213 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
215 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
217 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
218 self.failIf("file renewal secret:" in output, output)
220 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
221 "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
223 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
224 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
225 self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
228 output = self._dump_cap(u.to_string())
229 self.failUnless("Directory Read-only URI:" in output, output)
230 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
231 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
232 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
234 u = u.get_verify_cap()
235 output = self._dump_cap(u.to_string())
236 self.failUnless("Directory Verifier URI:" in output, output)
237 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
238 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
240 def _catalog_shares(self, *basedirs):
241 o = debug.CatalogSharesOptions()
242 o.stdout,o.stderr = StringIO(), StringIO()
243 args = list(basedirs)
245 debug.catalog_shares(o)
246 out = o.stdout.getvalue()
247 err = o.stderr.getvalue()
250 def test_catalog_shares_error(self):
251 nodedir1 = "cli/test_catalog_shares/node1"
252 sharedir = os.path.join(nodedir1, "storage", "shares", "mq", "mqfblse6m5a6dh45isu2cg7oji")
253 fileutil.make_dirs(sharedir)
254 f = open(os.path.join(sharedir, "8"), "wb")
255 open("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "wb").close()
256 # write a bogus share that looks a little bit like CHK
257 f.write("\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert
260 nodedir2 = "cli/test_catalog_shares/node2"
261 fileutil.make_dirs(nodedir2)
262 open("cli/test_catalog_shares/node1/storage/shares/not-a-dir", "wb").close()
264 # now make sure that the 'catalog-shares' commands survives the error
265 out, err = self._catalog_shares(nodedir1, nodedir2)
266 self.failUnlessEqual(out, "", out)
267 self.failUnless("Error processing " in err,
268 "didn't see 'error processing' in '%s'" % err)
269 #self.failUnless(nodedir1 in err,
270 # "didn't see '%s' in '%s'" % (nodedir1, err))
271 # windows mangles the path, and os.path.join isn't enough to make
272 # up for it, so just look for individual strings
273 self.failUnless("node1" in err,
274 "didn't see 'node1' in '%s'" % err)
275 self.failUnless("mqfblse6m5a6dh45isu2cg7oji" in err,
276 "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)
278 def test_alias(self):
279 aliases = {"tahoe": "TA",
283 return get_alias(aliases, path, "tahoe")
284 uses_lettercolon = common.platform_uses_lettercolon_drivename()
285 self.failUnlessEqual(ga1("bare"), ("TA", "bare"))
286 self.failUnlessEqual(ga1("baredir/file"), ("TA", "baredir/file"))
287 self.failUnlessEqual(ga1("baredir/file:7"), ("TA", "baredir/file:7"))
288 self.failUnlessEqual(ga1("tahoe:"), ("TA", ""))
289 self.failUnlessEqual(ga1("tahoe:file"), ("TA", "file"))
290 self.failUnlessEqual(ga1("tahoe:dir/file"), ("TA", "dir/file"))
291 self.failUnlessEqual(ga1("work:"), ("WA", ""))
292 self.failUnlessEqual(ga1("work:file"), ("WA", "file"))
293 self.failUnlessEqual(ga1("work:dir/file"), ("WA", "dir/file"))
294 # default != None means we really expect a tahoe path, regardless of
295 # whether we're on windows or not. This is what 'tahoe get' uses.
296 self.failUnlessEqual(ga1("c:"), ("CA", ""))
297 self.failUnlessEqual(ga1("c:file"), ("CA", "file"))
298 self.failUnlessEqual(ga1("c:dir/file"), ("CA", "dir/file"))
299 self.failUnlessEqual(ga1("URI:stuff"), ("URI:stuff", ""))
300 self.failUnlessEqual(ga1("URI:stuff:./file"), ("URI:stuff", "file"))
301 self.failUnlessEqual(ga1("URI:stuff:./dir/file"),
302 ("URI:stuff", "dir/file"))
303 self.failUnlessRaises(common.UnknownAliasError, ga1, "missing:")
304 self.failUnlessRaises(common.UnknownAliasError, ga1, "missing:dir")
305 self.failUnlessRaises(common.UnknownAliasError, ga1, "missing:dir/file")
308 return get_alias(aliases, path, None)
309 self.failUnlessEqual(ga2("bare"), (DefaultAliasMarker, "bare"))
310 self.failUnlessEqual(ga2("baredir/file"),
311 (DefaultAliasMarker, "baredir/file"))
312 self.failUnlessEqual(ga2("baredir/file:7"),
313 (DefaultAliasMarker, "baredir/file:7"))
314 self.failUnlessEqual(ga2("baredir/sub:1/file:7"),
315 (DefaultAliasMarker, "baredir/sub:1/file:7"))
316 self.failUnlessEqual(ga2("tahoe:"), ("TA", ""))
317 self.failUnlessEqual(ga2("tahoe:file"), ("TA", "file"))
318 self.failUnlessEqual(ga2("tahoe:dir/file"), ("TA", "dir/file"))
319 # on windows, we really want c:foo to indicate a local file.
320 # default==None is what 'tahoe cp' uses.
322 self.failUnlessEqual(ga2("c:"), (DefaultAliasMarker, "c:"))
323 self.failUnlessEqual(ga2("c:file"), (DefaultAliasMarker, "c:file"))
324 self.failUnlessEqual(ga2("c:dir/file"),
325 (DefaultAliasMarker, "c:dir/file"))
327 self.failUnlessEqual(ga2("c:"), ("CA", ""))
328 self.failUnlessEqual(ga2("c:file"), ("CA", "file"))
329 self.failUnlessEqual(ga2("c:dir/file"), ("CA", "dir/file"))
330 self.failUnlessEqual(ga2("work:"), ("WA", ""))
331 self.failUnlessEqual(ga2("work:file"), ("WA", "file"))
332 self.failUnlessEqual(ga2("work:dir/file"), ("WA", "dir/file"))
333 self.failUnlessEqual(ga2("URI:stuff"), ("URI:stuff", ""))
334 self.failUnlessEqual(ga2("URI:stuff:./file"), ("URI:stuff", "file"))
335 self.failUnlessEqual(ga2("URI:stuff:./dir/file"), ("URI:stuff", "dir/file"))
336 self.failUnlessRaises(common.UnknownAliasError, ga2, "missing:")
337 self.failUnlessRaises(common.UnknownAliasError, ga2, "missing:dir")
338 self.failUnlessRaises(common.UnknownAliasError, ga2, "missing:dir/file")
341 old = common.pretend_platform_uses_lettercolon
343 common.pretend_platform_uses_lettercolon = True
344 retval = get_alias(aliases, path, None)
346 common.pretend_platform_uses_lettercolon = old
348 self.failUnlessEqual(ga3("bare"), (DefaultAliasMarker, "bare"))
349 self.failUnlessEqual(ga3("baredir/file"),
350 (DefaultAliasMarker, "baredir/file"))
351 self.failUnlessEqual(ga3("baredir/file:7"),
352 (DefaultAliasMarker, "baredir/file:7"))
353 self.failUnlessEqual(ga3("baredir/sub:1/file:7"),
354 (DefaultAliasMarker, "baredir/sub:1/file:7"))
355 self.failUnlessEqual(ga3("tahoe:"), ("TA", ""))
356 self.failUnlessEqual(ga3("tahoe:file"), ("TA", "file"))
357 self.failUnlessEqual(ga3("tahoe:dir/file"), ("TA", "dir/file"))
358 self.failUnlessEqual(ga3("c:"), (DefaultAliasMarker, "c:"))
359 self.failUnlessEqual(ga3("c:file"), (DefaultAliasMarker, "c:file"))
360 self.failUnlessEqual(ga3("c:dir/file"),
361 (DefaultAliasMarker, "c:dir/file"))
362 self.failUnlessEqual(ga3("work:"), ("WA", ""))
363 self.failUnlessEqual(ga3("work:file"), ("WA", "file"))
364 self.failUnlessEqual(ga3("work:dir/file"), ("WA", "dir/file"))
365 self.failUnlessEqual(ga3("URI:stuff"), ("URI:stuff", ""))
366 self.failUnlessEqual(ga3("URI:stuff:./file"), ("URI:stuff", "file"))
367 self.failUnlessEqual(ga3("URI:stuff:./dir/file"), ("URI:stuff", "dir/file"))
368 self.failUnlessRaises(common.UnknownAliasError, ga3, "missing:")
369 self.failUnlessRaises(common.UnknownAliasError, ga3, "missing:dir")
370 self.failUnlessRaises(common.UnknownAliasError, ga3, "missing:dir/file")
373 class Help(unittest.TestCase):
376 help = str(cli.GetOptions())
377 self.failUnless("get VDRIVE_FILE LOCAL_FILE" in help, help)
378 self.failUnless("% tahoe get FOO |less" in help, help)
381 help = str(cli.PutOptions())
382 self.failUnless("put LOCAL_FILE VDRIVE_FILE" in help, help)
383 self.failUnless("% cat FILE | tahoe put" in help, help)
386 help = str(cli.RmOptions())
387 self.failUnless("rm VDRIVE_FILE" in help, help)
390 help = str(cli.MvOptions())
391 self.failUnless("mv FROM TO" in help, help)
394 help = str(cli.LnOptions())
395 self.failUnless("ln FROM TO" in help, help)
397 def test_backup(self):
398 help = str(cli.BackupOptions())
399 self.failUnless("backup FROM ALIAS:TO" in help, help)
401 def test_webopen(self):
402 help = str(cli.WebopenOptions())
403 self.failUnless("webopen [ALIAS:PATH]" in help, help)
405 def test_manifest(self):
406 help = str(cli.ManifestOptions())
407 self.failUnless("manifest [ALIAS:PATH]" in help, help)
409 def test_stats(self):
410 help = str(cli.StatsOptions())
411 self.failUnless("stats [ALIAS:PATH]" in help, help)
413 def test_check(self):
414 help = str(cli.CheckOptions())
415 self.failUnless("check [ALIAS:PATH]" in help, help)
417 def test_deep_check(self):
418 help = str(cli.DeepCheckOptions())
419 self.failUnless("deep-check [ALIAS:PATH]" in help, help)
421 def test_create_alias(self):
422 help = str(cli.CreateAliasOptions())
423 self.failUnless("create-alias ALIAS" in help, help)
425 def test_add_aliases(self):
426 help = str(cli.AddAliasOptions())
427 self.failUnless("add-alias ALIAS DIRCAP" in help, help)
430 def do_cli(self, verb, *args, **kwargs):
432 "--node-directory", self.get_clientdir(),
434 argv = [verb] + nodeargs + list(args)
435 stdin = kwargs.get("stdin", "")
436 stdout, stderr = StringIO(), StringIO()
437 d = threads.deferToThread(runner.runner, argv, run_by_human=False,
438 stdin=StringIO(stdin),
439 stdout=stdout, stderr=stderr)
441 return rc, stdout.getvalue(), stderr.getvalue()
445 class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
447 def _test_webopen(self, args, expected_url):
448 woo = cli.WebopenOptions()
449 all_args = ["--node-directory", self.get_clientdir()] + list(args)
450 woo.parseOptions(all_args)
452 rc = cli.webopen(woo, urls.append)
453 self.failUnlessEqual(rc, 0)
454 self.failUnlessEqual(len(urls), 1)
455 self.failUnlessEqual(urls[0], expected_url)
457 def test_create(self):
458 self.basedir = "cli/CreateAlias/create"
461 d = self.do_cli("create-alias", "tahoe")
462 def _done((rc,stdout,stderr)):
463 self.failUnless("Alias 'tahoe' created" in stdout)
465 aliases = get_aliases(self.get_clientdir())
466 self.failUnless("tahoe" in aliases)
467 self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
469 d.addCallback(lambda res: self.do_cli("create-alias", "two"))
471 def _stash_urls(res):
472 aliases = get_aliases(self.get_clientdir())
473 node_url_file = os.path.join(self.get_clientdir(), "node.url")
474 nodeurl = open(node_url_file, "r").read().strip()
475 uribase = nodeurl + "uri/"
476 self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
477 self.tahoe_subdir_url = self.tahoe_url + "/subdir"
478 self.two_url = uribase + urllib.quote(aliases["two"])
479 self.two_uri = aliases["two"]
480 d.addCallback(_stash_urls)
482 d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
483 def _check_create_duplicate((rc,stdout,stderr)):
484 self.failIfEqual(rc, 0)
485 self.failUnless("Alias 'two' already exists!" in stderr)
486 aliases = get_aliases(self.get_clientdir())
487 self.failUnlessEqual(aliases["two"], self.two_uri)
488 d.addCallback(_check_create_duplicate)
490 d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
491 def _check_add((rc,stdout,stderr)):
492 self.failUnlessEqual(rc, 0)
493 self.failUnless("Alias 'added' added" in stdout)
494 d.addCallback(_check_add)
496 # check add-alias with a duplicate
497 d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
498 def _check_add_duplicate((rc,stdout,stderr)):
499 self.failIfEqual(rc, 0)
500 self.failUnless("Alias 'two' already exists!" in stderr)
501 aliases = get_aliases(self.get_clientdir())
502 self.failUnlessEqual(aliases["two"], self.two_uri)
503 d.addCallback(_check_add_duplicate)
505 def _test_urls(junk):
506 self._test_webopen([], self.tahoe_url)
507 self._test_webopen(["/"], self.tahoe_url)
508 self._test_webopen(["tahoe:"], self.tahoe_url)
509 self._test_webopen(["tahoe:/"], self.tahoe_url)
510 self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
511 self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
512 self._test_webopen(["tahoe:subdir/file"], self.tahoe_subdir_url + '/file')
513 # if "file" is indeed a file, then the url produced by webopen in
514 # this case is disallowed by the webui. but by design, webopen
515 # passes through the mistake from the user to the resultant
517 self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
518 self._test_webopen(["two:"], self.two_url)
519 d.addCallback(_test_urls)
523 class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
525 def test_unlinked_immutable_stdin(self):
526 # tahoe get `echo DATA | tahoe put`
527 # tahoe get `echo DATA | tahoe put -`
528 self.basedir = "cli/Put/unlinked_immutable_stdin"
531 d = self.do_cli("put", stdin=DATA)
533 (rc, stdout, stderr) = res
534 self.failUnless("waiting for file data on stdin.." in stderr)
535 self.failUnless("200 OK" in stderr, stderr)
536 self.readcap = stdout
537 self.failUnless(self.readcap.startswith("URI:CHK:"))
538 d.addCallback(_uploaded)
539 d.addCallback(lambda res: self.do_cli("get", self.readcap))
540 def _downloaded(res):
541 (rc, stdout, stderr) = res
542 self.failUnlessEqual(stderr, "")
543 self.failUnlessEqual(stdout, DATA)
544 d.addCallback(_downloaded)
545 d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
546 d.addCallback(lambda (rc,stdout,stderr):
547 self.failUnlessEqual(stdout, self.readcap))
550 def test_unlinked_immutable_from_file(self):
552 # tahoe put ./file.txt
553 # tahoe put /tmp/file.txt
554 # tahoe put ~/file.txt
555 self.basedir = "cli/Put/unlinked_immutable_from_file"
558 rel_fn = os.path.join(self.basedir, "DATAFILE")
559 abs_fn = os.path.abspath(rel_fn)
560 # we make the file small enough to fit in a LIT file, for speed
561 f = open(rel_fn, "w")
562 f.write("short file")
564 d = self.do_cli("put", rel_fn)
565 def _uploaded((rc,stdout,stderr)):
567 self.failUnless(readcap.startswith("URI:LIT:"))
568 self.readcap = readcap
569 d.addCallback(_uploaded)
570 d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
571 d.addCallback(lambda (rc,stdout,stderr):
572 self.failUnlessEqual(stdout, self.readcap))
573 d.addCallback(lambda res: self.do_cli("put", abs_fn))
574 d.addCallback(lambda (rc,stdout,stderr):
575 self.failUnlessEqual(stdout, self.readcap))
576 # we just have to assume that ~ is handled properly
579 def test_immutable_from_file(self):
580 # tahoe put file.txt uploaded.txt
581 # tahoe - uploaded.txt
582 # tahoe put file.txt subdir/uploaded.txt
583 # tahoe put file.txt tahoe:uploaded.txt
584 # tahoe put file.txt tahoe:subdir/uploaded.txt
585 # tahoe put file.txt DIRCAP:./uploaded.txt
586 # tahoe put file.txt DIRCAP:./subdir/uploaded.txt
587 self.basedir = "cli/Put/immutable_from_file"
590 rel_fn = os.path.join(self.basedir, "DATAFILE")
591 abs_fn = os.path.abspath(rel_fn)
592 # we make the file small enough to fit in a LIT file, for speed
594 DATA2 = "short file two"
595 f = open(rel_fn, "w")
599 d = self.do_cli("create-alias", "tahoe")
601 d.addCallback(lambda res:
602 self.do_cli("put", rel_fn, "uploaded.txt"))
603 def _uploaded((rc,stdout,stderr)):
604 readcap = stdout.strip()
605 self.failUnless(readcap.startswith("URI:LIT:"))
606 self.failUnless("201 Created" in stderr, stderr)
607 self.readcap = readcap
608 d.addCallback(_uploaded)
609 d.addCallback(lambda res:
610 self.do_cli("get", "tahoe:uploaded.txt"))
611 d.addCallback(lambda (rc,stdout,stderr):
612 self.failUnlessEqual(stdout, DATA))
614 d.addCallback(lambda res:
615 self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
616 def _replaced((rc,stdout,stderr)):
617 readcap = stdout.strip()
618 self.failUnless(readcap.startswith("URI:LIT:"))
619 self.failUnless("200 OK" in stderr, stderr)
620 d.addCallback(_replaced)
622 d.addCallback(lambda res:
623 self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
624 d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
625 d.addCallback(lambda (rc,stdout,stderr):
626 self.failUnlessEqual(stdout, DATA))
628 d.addCallback(lambda res:
629 self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
630 d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
631 d.addCallback(lambda (rc,stdout,stderr):
632 self.failUnlessEqual(stdout, DATA))
634 d.addCallback(lambda res:
635 self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
636 d.addCallback(lambda res:
637 self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
638 d.addCallback(lambda (rc,stdout,stderr):
639 self.failUnlessEqual(stdout, DATA))
641 def _get_dircap(res):
642 self.dircap = get_aliases(self.get_clientdir())["tahoe"]
643 d.addCallback(_get_dircap)
645 d.addCallback(lambda res:
646 self.do_cli("put", rel_fn,
647 self.dircap+":./uploaded5.txt"))
648 d.addCallback(lambda res:
649 self.do_cli("get", "tahoe:uploaded5.txt"))
650 d.addCallback(lambda (rc,stdout,stderr):
651 self.failUnlessEqual(stdout, DATA))
653 d.addCallback(lambda res:
654 self.do_cli("put", rel_fn,
655 self.dircap+":./subdir/uploaded6.txt"))
656 d.addCallback(lambda res:
657 self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
658 d.addCallback(lambda (rc,stdout,stderr):
659 self.failUnlessEqual(stdout, DATA))
663 def test_mutable_unlinked(self):
664 # FILECAP = `echo DATA | tahoe put --mutable`
665 # tahoe get FILECAP, compare against DATA
666 # echo DATA2 | tahoe put - FILECAP
667 # tahoe get FILECAP, compare against DATA2
668 # tahoe put file.txt FILECAP
669 self.basedir = "cli/Put/mutable_unlinked"
674 rel_fn = os.path.join(self.basedir, "DATAFILE")
675 abs_fn = os.path.abspath(rel_fn)
676 DATA3 = "three" * 100
677 f = open(rel_fn, "w")
681 d = self.do_cli("put", "--mutable", stdin=DATA)
683 (rc, stdout, stderr) = res
684 self.failUnless("waiting for file data on stdin.." in stderr)
685 self.failUnless("200 OK" in stderr)
686 self.filecap = stdout
687 self.failUnless(self.filecap.startswith("URI:SSK:"))
688 d.addCallback(_created)
689 d.addCallback(lambda res: self.do_cli("get", self.filecap))
690 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA))
692 d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
694 (rc, stdout, stderr) = res
695 self.failUnless("waiting for file data on stdin.." in stderr)
696 self.failUnless("200 OK" in stderr)
697 self.failUnlessEqual(self.filecap, stdout)
698 d.addCallback(_replaced)
699 d.addCallback(lambda res: self.do_cli("get", self.filecap))
700 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
702 d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
704 (rc, stdout, stderr) = res
705 self.failUnless("200 OK" in stderr)
706 self.failUnlessEqual(self.filecap, stdout)
707 d.addCallback(_replaced2)
708 d.addCallback(lambda res: self.do_cli("get", self.filecap))
709 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA3))
713 def test_mutable(self):
714 # echo DATA1 | tahoe put --mutable - uploaded.txt
715 # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
716 # tahoe get uploaded.txt, compare against DATA2
718 self.basedir = "cli/Put/mutable"
722 fn1 = os.path.join(self.basedir, "DATA1")
727 fn2 = os.path.join(self.basedir, "DATA2")
732 d = self.do_cli("create-alias", "tahoe")
733 d.addCallback(lambda res:
734 self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
735 d.addCallback(lambda res:
736 self.do_cli("put", fn2, "tahoe:uploaded.txt"))
737 d.addCallback(lambda res:
738 self.do_cli("get", "tahoe:uploaded.txt"))
739 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
742 class List(GridTestMixin, CLITestMixin, unittest.TestCase):
744 self.basedir = "cli/List/list"
746 c0 = self.g.clients[0]
747 d = c0.create_empty_dirnode()
748 def _stash_root_and_create_file(n):
750 self.rooturi = n.get_uri()
751 return n.add_file(u"good", upload.Data("small", convergence=""))
752 d.addCallback(_stash_root_and_create_file)
753 d.addCallback(lambda ign:
754 self.rootnode.create_empty_directory(u"1share"))
755 d.addCallback(lambda n:
756 self.delete_shares_numbered(n.get_uri(), range(1,10)))
757 d.addCallback(lambda ign:
758 self.rootnode.create_empty_directory(u"0share"))
759 d.addCallback(lambda n:
760 self.delete_shares_numbered(n.get_uri(), range(0,10)))
761 d.addCallback(lambda ign:
762 self.do_cli("add-alias", "tahoe", self.rooturi))
763 d.addCallback(lambda ign: self.do_cli("ls"))
764 def _check1((rc,out,err)):
765 self.failUnlessEqual(err, "")
766 self.failUnlessEqual(rc, 0)
767 self.failUnlessEqual(out.splitlines(), ["0share", "1share", "good"])
768 d.addCallback(_check1)
769 d.addCallback(lambda ign: self.do_cli("ls", "missing"))
770 def _check2((rc,out,err)):
771 self.failIfEqual(rc, 0)
772 self.failUnlessEqual(err.strip(), "No such file or directory")
773 self.failUnlessEqual(out, "")
774 d.addCallback(_check2)
775 d.addCallback(lambda ign: self.do_cli("ls", "1share"))
776 def _check3((rc,out,err)):
777 self.failIfEqual(rc, 0)
778 self.failUnlessIn("Error during GET: 410 Gone ", err)
779 self.failUnlessIn("UnrecoverableFileError:", err)
780 self.failUnlessIn("could not be retrieved, because there were "
781 "insufficient good shares.", err)
782 self.failUnlessEqual(out, "")
783 d.addCallback(_check3)
784 d.addCallback(lambda ign: self.do_cli("ls", "0share"))
785 d.addCallback(_check3)
788 class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
790 def test_not_enough_args(self):
792 self.failUnlessRaises(usage.UsageError,
793 o.parseOptions, ["onearg"])
795 def test_unicode_filename(self):
796 self.basedir = "cli/Cp/unicode_filename"
799 fn1 = os.path.join(self.basedir, "Ärtonwall")
800 DATA1 = "unicode file content"
801 open(fn1, "wb").write(DATA1)
803 fn2 = os.path.join(self.basedir, "Metallica")
804 DATA2 = "non-unicode file content"
805 open(fn2, "wb").write(DATA2)
808 # Assure that uploading a file whose name contains unicode character doesn't
809 # prevent further uploads in the same directory
810 d = self.do_cli("create-alias", "tahoe")
811 d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
812 d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
814 d.addCallback(lambda res: self.do_cli("get", "tahoe:Ärtonwall"))
815 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA1))
817 d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
818 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
821 test_unicode_filename.todo = "This behavior is not yet supported, although it does happen to work (for reasons that are ill-understood) on many platforms. See issue ticket #534."
823 def test_dangling_symlink_vs_recursion(self):
# Skip on platforms (e.g. Windows under this Python) without symlinks.
824 if not hasattr(os, 'symlink'):
825 raise unittest.SkipTest("There is no symlink on this platform.")
826 # cp -r on a directory containing a dangling symlink shouldn't assert
827 self.basedir = "cli/Cp/dangling_symlink_vs_recursion"
# NOTE(review): line 828 (presumably grid setup) and line 830
# (presumably creating the directory `dn`) are missing from this
# listing.
829 dn = os.path.join(self.basedir, "dir")
831 fn = os.path.join(dn, "Fakebandica")
832 ln = os.path.join(dn, "link")
# NOTE(review): lines 833-834, which presumably create the dangling
# link (os.symlink to `fn` without ever creating `fn`), are missing.
835 d = self.do_cli("create-alias", "tahoe")
# The recursive copy must complete without an assertion failure.
# NOTE(review): the remaining arguments of this call and the method's
# tail (lines 837-839) are missing from this listing.
836 d.addCallback(lambda res: self.do_cli("cp", "--recursive",
840 class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
# Tests for the "tahoe backup" CLI command.
842 def writeto(self, path, data):
# Helper: create (or overwrite) basedir/home/<path>, first making any
# missing intermediate directories.
843 d = os.path.dirname(os.path.join(self.basedir, "home", path))
844 fileutil.make_dirs(d)
845 f = open(os.path.join(self.basedir, "home", path), "w")
# NOTE(review): lines 846-848, which presumably write `data` to `f`
# and close it, are missing from this listing.
def count_output(self, out):
    """Parse the summary line printed by "tahoe backup".

    Returns [files_uploaded, files_reused, dirs_created, dirs_reused]
    as a list of ints.
    """
    # Bug fix: the original pattern began with "(\d)+", which matches a
    # run of digits but captures only the LAST one, so e.g. "12 files
    # uploaded" would be reported as 2. "(\d+)" captures the whole
    # number, consistent with the other three groups.
    mo = re.search(r"(\d+) files uploaded \((\d+) reused\), "
                   r"(\d+) directories created \((\d+) reused\)", out)
    return [int(s) for s in mo.groups()]
def count_output2(self, out):
    """Parse the verbose re-check summary line printed by "tahoe backup".

    Returns [files_checked, dirs_checked, dirs_read] as a list of ints.
    """
    # Same fix as count_output: "(\d)+" captured only the final digit of
    # a multi-digit count; "(\d+)" captures the full number.
    mo = re.search(r"(\d+) files checked, (\d+) directories checked, "
                   r"(\d+) directories read", out)
    return [int(s) for s in mo.groups()]
# Exercise "tahoe backup" end-to-end: an initial backup, an idempotent
# re-backup via the backupdb, a forced re-check of all files, mutation of
# the local tree, and a final --no-backupdb run.
# NOTE(review): this listing is line-sampled; gaps in the embedded
# numbering (859-860, 863, 865-866, 873, 875, 878, 880, 883, 885-886,
# 892, 905, 937-938, 943, 946, 955-956, 966, 968, 988, 994-996, 1003,
# 1005, 1015, 1022, 1026, 1037-1039, 1057, 1071-1073) are lines missing
# from this view. In particular `cmd` (in do_backup), `_modify`'s `def`
# line, and the `expected_new` computations are defined in lines not
# shown here.
857 def test_backup(self):
858 self.basedir = "cli/Backup/backup"
861 # is the backupdb available? If so, we test that a second backup does
862 # not create new directories.
864 have_bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
# Build a small local tree: one empty dir plus three files under parent/.
867 # create a small local directory with a couple of files
868 source = os.path.join(self.basedir, "home")
869 fileutil.make_dirs(os.path.join(source, "empty"))
870 self.writeto("parent/subdir/foo.txt", "foo")
871 self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
872 self.writeto("parent/blah.txt", "blah")
# Helper that runs "tahoe backup <source> tahoe:backups", optionally
# disabling the backupdb and/or enabling verbose output.
874 def do_backup(use_backupdb=True, verbose=False):
876 if not have_bdb or not use_backupdb:
877 cmd.append("--no-backupdb")
879 cmd.append("--verbose")
881 cmd.append("tahoe:backups")
882 return self.do_cli(*cmd)
884 d = self.do_cli("create-alias", "tahoe")
# Without a usable sqlite, a plain backup must complain about it.
887 d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:backups"))
888 def _should_complain((rc, out, err)):
889 self.failUnless("I was unable to import a python sqlite library" in err, err)
890 d.addCallback(_should_complain)
891 d.addCallback(self.stall, 1.1) # make sure the backups get distinct timestamps
# First real backup: everything is new.
893 d.addCallback(lambda res: do_backup())
894 def _check0((rc, out, err)):
895 self.failUnlessEqual(err, "")
896 self.failUnlessEqual(rc, 0)
897 fu, fr, dc, dr = self.count_output(out)
898 # foo.txt, bar.txt, blah.txt
899 self.failUnlessEqual(fu, 3)
900 self.failUnlessEqual(fr, 0)
901 # empty, home, home/parent, home/parent/subdir
902 self.failUnlessEqual(dc, 4)
903 self.failUnlessEqual(dr, 0)
904 d.addCallback(_check0)
# Verify the remote layout: backups/{Archives,Latest}, and that Latest
# mirrors the local tree.
906 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups"))
907 def _check1((rc, out, err)):
908 self.failUnlessEqual(err, "")
909 self.failUnlessEqual(rc, 0)
910 self.failUnlessEqual(sorted(out.split()), ["Archives", "Latest"])
911 d.addCallback(_check1)
912 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
913 def _check2((rc, out, err)):
914 self.failUnlessEqual(err, "")
915 self.failUnlessEqual(rc, 0)
916 self.failUnlessEqual(sorted(out.split()), ["empty", "parent"])
917 d.addCallback(_check2)
918 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
919 def _check2a((rc, out, err)):
920 self.failUnlessEqual(err, "")
921 self.failUnlessEqual(rc, 0)
922 self.failUnlessEqual(out.strip(), "")
923 d.addCallback(_check2a)
924 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
925 def _check3((rc, out, err)):
926 self.failUnlessEqual(err, "")
927 self.failUnlessEqual(rc, 0)
928 self.failUnlessEqual(out, "foo")
929 d.addCallback(_check3)
930 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
931 def _check4((rc, out, err)):
932 self.failUnlessEqual(err, "")
933 self.failUnlessEqual(rc, 0)
934 self.old_archives = out.split()
935 self.failUnlessEqual(len(self.old_archives), 1)
936 d.addCallback(_check4)
# Second backup with nothing changed: expect full reuse.
939 d.addCallback(self.stall, 1.1)
940 d.addCallback(lambda res: do_backup())
941 def _check4a((rc, out, err)):
942 # second backup should reuse everything, if the backupdb is
944 self.failUnlessEqual(err, "")
945 self.failUnlessEqual(rc, 0)
947 fu, fr, dc, dr = self.count_output(out)
948 # foo.txt, bar.txt, blah.txt
949 self.failUnlessEqual(fu, 0)
950 self.failUnlessEqual(fr, 3)
951 # empty, home, home/parent, home/parent/subdir
952 self.failUnlessEqual(dc, 0)
953 self.failUnlessEqual(dr, 4)
954 d.addCallback(_check4a)
957 # sneak into the backupdb, crank back the "last checked"
958 # timestamp to force a check on all files
959 def _reset_last_checked(res):
960 dbfile = os.path.join(self.get_clientdir(),
961 "private", "backupdb.sqlite")
962 self.failUnless(os.path.exists(dbfile), dbfile)
963 bdb = backupdb.get_backupdb(dbfile)
964 bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
965 bdb.connection.commit()
967 d.addCallback(_reset_last_checked)
969 d.addCallback(self.stall, 1.1)
970 d.addCallback(lambda res: do_backup(verbose=True))
971 def _check4b((rc, out, err)):
972 # we should check all files, and re-use all of them. None of
973 # the directories should have been changed.
974 self.failUnlessEqual(err, "")
975 self.failUnlessEqual(rc, 0)
976 fu, fr, dc, dr = self.count_output(out)
977 fchecked, dchecked, dread = self.count_output2(out)
978 self.failUnlessEqual(fchecked, 3)
979 self.failUnlessEqual(fu, 0)
980 self.failUnlessEqual(fr, 3)
981 # TODO: backupdb doesn't do dirs yet; when it does, this will
982 # change to dchecked=4, and maybe dread=0
983 self.failUnlessEqual(dchecked, 0)
984 self.failUnlessEqual(dread, 4)
985 self.failUnlessEqual(dc, 0)
986 self.failUnlessEqual(dr, 4)
987 d.addCallback(_check4b)
989 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
990 def _check5((rc, out, err)):
991 self.failUnlessEqual(err, "")
992 self.failUnlessEqual(rc, 0)
993 self.new_archives = out.split()
# NOTE(review): the computation of `expected_new` (lines 994-996) is
# missing from this listing.
997 self.failUnlessEqual(len(self.new_archives), expected_new, out)
998 # the original backup should still be the oldest (i.e. sorts
999 # alphabetically towards the beginning)
1000 self.failUnlessEqual(sorted(self.new_archives)[0],
1001 self.old_archives[0])
1002 d.addCallback(_check5)
1004 d.addCallback(self.stall, 1.1)
# Mutate the local tree: change a file, replace a file with a directory
# (and vice versa), so the next backup must create new objects.
# NOTE(review): line 1005 (`def _modify(res):` presumably) is missing
# from this listing; lines 1006-1014 read as its body.
1006 self.writeto("parent/subdir/foo.txt", "FOOF!")
1007 # and turn a file into a directory
1008 os.unlink(os.path.join(source, "parent/blah.txt"))
1009 os.mkdir(os.path.join(source, "parent/blah.txt"))
1010 self.writeto("parent/blah.txt/surprise file", "surprise")
1011 self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
1012 # turn a directory into a file
1013 os.rmdir(os.path.join(source, "empty"))
1014 self.writeto("empty", "imagine nothing being here")
1016 d.addCallback(_modify)
1017 def _check5a((rc, out, err)):
1018 # second backup should reuse bar.txt (if backupdb is available),
1019 # and upload the rest. None of the directories can be reused.
1020 self.failUnlessEqual(err, "")
1021 self.failUnlessEqual(rc, 0)
1023 fu, fr, dc, dr = self.count_output(out)
1024 # new foo.txt, surprise file, subfile, empty
1025 self.failUnlessEqual(fu, 4)
1027 self.failUnlessEqual(fr, 1)
1028 # home, parent, subdir, blah.txt, surprisedir
1029 self.failUnlessEqual(dc, 5)
1030 self.failUnlessEqual(dr, 0)
1031 d.addCallback(_check5a)
1032 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
1033 def _check6((rc, out, err)):
1034 self.failUnlessEqual(err, "")
1035 self.failUnlessEqual(rc, 0)
1036 self.new_archives = out.split()
# NOTE(review): lines 1037-1039 (updated `expected_new`) are missing
# from this listing.
1040 self.failUnlessEqual(len(self.new_archives), expected_new)
1041 self.failUnlessEqual(sorted(self.new_archives)[0],
1042 self.old_archives[0])
1043 d.addCallback(_check6)
# Latest must reflect the modification, while the old archive snapshot
# retains the original contents.
1044 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
1045 def _check7((rc, out, err)):
1046 self.failUnlessEqual(err, "")
1047 self.failUnlessEqual(rc, 0)
1048 self.failUnlessEqual(out, "FOOF!")
1049 # the old snapshot should not be modified
1050 return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
1051 d.addCallback(_check7)
1052 def _check8((rc, out, err)):
1053 self.failUnlessEqual(err, "")
1054 self.failUnlessEqual(rc, 0)
1055 self.failUnlessEqual(out, "foo")
1056 d.addCallback(_check8)
1058 d.addCallback(self.stall, 1.1)
1059 d.addCallback(lambda res: do_backup(use_backupdb=False))
1060 def _check9((rc, out, err)):
1061 # --no-backupdb means re-upload everything. We still get to
1062 # re-use the directories, since nothing changed.
1063 self.failUnlessEqual(err, "")
1064 self.failUnlessEqual(rc, 0)
1065 fu, fr, dc, dr = self.count_output(out)
1066 self.failUnlessEqual(fu, 5)
1067 self.failUnlessEqual(fr, 0)
1068 self.failUnlessEqual(dc, 0)
1069 self.failUnlessEqual(dr, 5)
1070 d.addCallback(_check9)
1074 # on our old dapper buildslave, this test takes a long time (usually
1075 # 130s), so we have to bump up the default 120s timeout. The create-alias
1076 # and initial backup alone take 60s, probably because of the handful of
1077 # dirnodes being created (RSA key generation). The backup between check4
1078 # and check4a takes 6s, as does the backup before check4b.
1079 test_backup.timeout = 300
# Exercise the --exclude / --exclude-vcs / --exclude-from options of
# "tahoe backup" by driving BackupOptions.filter_listdir directly over
# synthetic directory listings.
# NOTE(review): this listing is line-sampled; missing lines include 1089-
# 1090 (presumably nodeurl.close()), 1093 (presumably `all = set(all)` —
# _check_filtering calls all.difference below), 1105-1106 and 1144-1145
# (continuation arguments of the two _check_filtering calls), and 1125
# (presumably excl_file.close()).
1081 def test_exclude_options(self):
1082 root_listdir = ('lib.a', '_darcs', 'subdir', 'nice_doc.lyx')
1083 subdir_listdir = ('another_doc.lyx', 'run_snake_run.py', 'CVS', '.svn', '_darcs')
1084 basedir = "cli/Backup/exclude_options"
1085 fileutil.make_dirs(basedir)
1086 nodeurl_path = os.path.join(basedir, 'node.url')
1087 nodeurl = file(nodeurl_path, 'w')
1088 nodeurl.write('http://example.net:2357/')
# Assert that filter_listdir kept exactly `included` and dropped
# exactly `excluded` from `all`.
1091 def _check_filtering(filtered, all, included, excluded):
1092 filtered = set(filtered)
1094 included = set(included)
1095 excluded = set(excluded)
1096 self.failUnlessEqual(filtered, included)
1097 self.failUnlessEqual(all.difference(filtered), excluded)
1099 # test simple exclude
1100 backup_options = cli.BackupOptions()
1101 backup_options.parseOptions(['--exclude', '*lyx', '--node-directory',
1102 basedir, 'from', 'to'])
1103 filtered = list(backup_options.filter_listdir(root_listdir))
1104 _check_filtering(filtered, root_listdir, ('lib.a', '_darcs', 'subdir'),
# Multiple --exclude patterns combine ('*lyx' and 'lib.?').
1107 backup_options = cli.BackupOptions()
1108 backup_options.parseOptions(['--exclude', '*lyx', '--exclude', 'lib.?', '--node-directory',
1109 basedir, 'from', 'to'])
1110 filtered = list(backup_options.filter_listdir(root_listdir))
1111 _check_filtering(filtered, root_listdir, ('_darcs', 'subdir'),
1112 ('nice_doc.lyx', 'lib.a'))
1113 # vcs metadata exclusion
1114 backup_options = cli.BackupOptions()
1115 backup_options.parseOptions(['--exclude-vcs', '--node-directory',
1116 basedir, 'from', 'to'])
1117 filtered = list(backup_options.filter_listdir(subdir_listdir))
1118 _check_filtering(filtered, subdir_listdir, ('another_doc.lyx', 'run_snake_run.py',),
1119 ('CVS', '.svn', '_darcs'))
1120 # read exclude patterns from file
1121 exclusion_string = "_darcs\n*py\n.svn"
1122 excl_filepath = os.path.join(basedir, 'exclusion')
1123 excl_file = file(excl_filepath, 'w')
1124 excl_file.write(exclusion_string)
1126 backup_options = cli.BackupOptions()
1127 backup_options.parseOptions(['--exclude-from', excl_filepath, '--node-directory',
1128 basedir, 'from', 'to'])
1129 filtered = list(backup_options.filter_listdir(subdir_listdir))
1130 _check_filtering(filtered, subdir_listdir, ('another_doc.lyx', 'CVS'),
1131 ('.svn', '_darcs', 'run_snake_run.py'))
1132 # test BackupConfigurationError: a missing --exclude-from file
1133 self.failUnlessRaises(cli.BackupConfigurationError,
1134 backup_options.parseOptions,
1135 ['--exclude-from', excl_filepath + '.no', '--node-directory',
1136 basedir, 'from', 'to'])
1138 # test that an iterator works too
1139 backup_options = cli.BackupOptions()
1140 backup_options.parseOptions(['--exclude', '*lyx', '--node-directory',
1141 basedir, 'from', 'to'])
1142 filtered = list(backup_options.filter_listdir(iter(root_listdir)))
1143 _check_filtering(filtered, root_listdir, ('lib.a', '_darcs', 'subdir'),
1146 class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
# Tests for "tahoe check" (with --raw, --verify, --repair variants).
1148 def test_check(self):
# Check a healthy mutable file, then delete one share and corrupt a
# second, and verify that check/verify/repair report and fix this.
1149 self.basedir = "cli/Check/check"
# NOTE(review): lines 1150 (grid setup), 1152 (DATA definition), 1154
# (`def _stash_uri(n):` presumably — used below), 1187 (the shnum
# argument completing the %-tuple), and the method's interleaving
# blank lines are missing from this listing.
1151 c0 = self.g.clients[0]
1153 d = c0.create_mutable_file(DATA)
1155 self.uri = n.get_uri()
1156 d.addCallback(_stash_uri)
# 1) plain check of a healthy file.
1158 d.addCallback(lambda ign: self.do_cli("check", self.uri))
1159 def _check1((rc, out, err)):
1160 self.failUnlessEqual(err, "")
1161 self.failUnlessEqual(rc, 0)
1162 lines = out.splitlines()
1163 self.failUnless("Summary: Healthy" in lines, out)
1164 self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
1165 d.addCallback(_check1)
# 2) --raw output must be machine-parseable JSON.
1167 d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
1168 def _check2((rc, out, err)):
1169 self.failUnlessEqual(err, "")
1170 self.failUnlessEqual(rc, 0)
1171 data = simplejson.loads(out)
1172 self.failUnlessEqual(data["summary"], "Healthy")
1173 d.addCallback(_check2)
# 3) damage the file: unlink one share, corrupt another via the debug
# CLI, and remember the expected "corrupt share" report line.
1175 def _clobber_shares(ignored):
1176 # delete one, corrupt a second
1177 shares = self.find_shares(self.uri)
1178 self.failUnlessEqual(len(shares), 10)
1179 os.unlink(shares[0][2])
1180 cso = debug.CorruptShareOptions()
1181 cso.stdout = StringIO()
1182 cso.parseOptions([shares[1][2]])
1183 storage_index = uri.from_string(self.uri).get_storage_index()
1184 self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
1185 (base32.b2a(shares[1][1]),
1186 base32.b2a(storage_index),
1188 debug.corrupt_share(cso)
1189 d.addCallback(_clobber_shares)
# 4) --verify must notice both the missing and the corrupt share.
1191 d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
1192 def _check3((rc, out, err)):
1193 self.failUnlessEqual(err, "")
1194 self.failUnlessEqual(rc, 0)
1195 lines = out.splitlines()
1196 summary = [l for l in lines if l.startswith("Summary")][0]
1197 self.failUnless("Summary: Unhealthy: 8 shares (enc 3-of-10)"
1198 in summary, summary)
1199 self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
1200 self.failUnless(" corrupt shares:" in lines, out)
1201 self.failUnless(self._corrupt_share_line in lines, out)
1202 d.addCallback(_check3)
# 5) --verify --repair must report the damage and repair it.
1204 d.addCallback(lambda ign:
1205 self.do_cli("check", "--verify", "--repair", self.uri))
1206 def _check4((rc, out, err)):
1207 self.failUnlessEqual(err, "")
1208 self.failUnlessEqual(rc, 0)
1209 lines = out.splitlines()
1210 self.failUnless("Summary: not healthy" in lines, out)
1211 self.failUnless(" good-shares: 8 (encoding is 3-of-10)" in lines, out)
1212 self.failUnless(" corrupt shares:" in lines, out)
1213 self.failUnless(self._corrupt_share_line in lines, out)
1214 self.failUnless(" repair successful" in lines, out)
1215 d.addCallback(_check4)
# 6) after the repair, a second --verify --repair sees a healthy file.
1217 d.addCallback(lambda ign:
1218 self.do_cli("check", "--verify", "--repair", self.uri))
1219 def _check5((rc, out, err)):
1220 self.failUnlessEqual(err, "")
1221 self.failUnlessEqual(rc, 0)
1222 lines = out.splitlines()
1223 self.failUnless("Summary: healthy" in lines, out)
1224 self.failUnless(" good-shares: 10 (encoding is 3-of-10)" in lines, out)
1225 self.failIf(" corrupt shares:" in lines, out)
1226 d.addCallback(_check5)
# Exercise "tahoe deep-check" over a small directory tree (root, an
# immutable file, a LIT file, a mutable file), then with damaged shares,
# with --verify/--repair, and finally with an unrecoverable subdirectory.
# NOTE(review): this listing is line-sampled; missing lines include 1232
# (grid setup), 1234-1236 (DATA and self.uris initialization), 1239
# (self.rootnode = n presumably), 1245/1250 (continuations), 1262/1281/
# 1316-1319/1333-1338 ("in lines, out" continuations of the failUnless
# calls), 1297 (the shnum argument), 1300-1305 (expected-state comments),
# 1324/1343/1356 (the rooturi argument of those do_cli calls), 1374-1375,
# 1383-1391, 1399, 1408, 1413, 1418, and 1422-1424 (presumably return d).
1230 def test_deep_check(self):
1231 self.basedir = "cli/Check/deep_check"
1233 c0 = self.g.clients[0]
1237 d = c0.create_empty_dirnode()
1238 def _stash_root_and_create_file(n):
1240 self.rooturi = n.get_uri()
1241 return n.add_file(u"good", upload.Data(DATA, convergence=""))
1242 d.addCallback(_stash_root_and_create_file)
1243 def _stash_uri(fn, which):
1244 self.uris[which] = fn.get_uri()
1246 d.addCallback(_stash_uri, "good")
1247 d.addCallback(lambda ign:
1248 self.rootnode.add_file(u"small",
1249 upload.Data("literal",
1251 d.addCallback(_stash_uri, "small")
1252 d.addCallback(lambda ign: c0.create_mutable_file(DATA+"1"))
1253 d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
1254 d.addCallback(_stash_uri, "mutable")
# 1) deep-check of a fully healthy tree.
1256 d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
1257 def _check1((rc, out, err)):
1258 self.failUnlessEqual(err, "")
1259 self.failUnlessEqual(rc, 0)
1260 lines = out.splitlines()
1261 self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
1263 d.addCallback(_check1)
# 2) --verbose lists one status line per object.
1270 d.addCallback(lambda ign: self.do_cli("deep-check", "--verbose",
1272 def _check2((rc, out, err)):
1273 self.failUnlessEqual(err, "")
1274 self.failUnlessEqual(rc, 0)
1275 lines = out.splitlines()
1276 self.failUnless("<root>: Healthy" in lines, out)
1277 self.failUnless("small: Healthy (LIT)" in lines, out)
1278 self.failUnless("good: Healthy" in lines, out)
1279 self.failUnless("mutable: Healthy" in lines, out)
1280 self.failUnless("done: 4 objects checked, 4 healthy, 0 unhealthy"
1282 d.addCallback(_check2)
# 3) damage the tree: delete one share of "good", corrupt one share of
# "mutable", and remember the expected corrupt-share report line.
1284 def _clobber_shares(ignored):
1285 shares = self.find_shares(self.uris["good"])
1286 self.failUnlessEqual(len(shares), 10)
1287 os.unlink(shares[0][2])
1289 shares = self.find_shares(self.uris["mutable"])
1290 cso = debug.CorruptShareOptions()
1291 cso.stdout = StringIO()
1292 cso.parseOptions([shares[1][2]])
1293 storage_index = uri.from_string(self.uris["mutable"]).get_storage_index()
1294 self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
1295 (base32.b2a(shares[1][1]),
1296 base32.b2a(storage_index),
1298 debug.corrupt_share(cso)
1299 d.addCallback(_clobber_shares)
1302 # root/good [9 shares]
1304 # root/mutable [1 corrupt share]
# 4) a non-verifying deep-check sees the missing share but cannot see
# the corruption.
1306 d.addCallback(lambda ign:
1307 self.do_cli("deep-check", "--verbose", self.rooturi))
1308 def _check3((rc, out, err)):
1309 self.failUnlessEqual(err, "")
1310 self.failUnlessEqual(rc, 0)
1311 lines = out.splitlines()
1312 self.failUnless("<root>: Healthy" in lines, out)
1313 self.failUnless("small: Healthy (LIT)" in lines, out)
1314 self.failUnless("mutable: Healthy" in lines, out) # needs verifier
1315 self.failUnless("good: Not Healthy: 9 shares (enc 3-of-10)"
1317 self.failIf(self._corrupt_share_line in lines, out)
1318 self.failUnless("done: 4 objects checked, 3 healthy, 1 unhealthy"
1320 d.addCallback(_check3)
# 5) with --verify the corrupt mutable share is detected too.
1322 d.addCallback(lambda ign:
1323 self.do_cli("deep-check", "--verbose", "--verify",
1325 def _check4((rc, out, err)):
1326 self.failUnlessEqual(err, "")
1327 self.failUnlessEqual(rc, 0)
1328 lines = out.splitlines()
1329 self.failUnless("<root>: Healthy" in lines, out)
1330 self.failUnless("small: Healthy (LIT)" in lines, out)
1331 mutable = [l for l in lines if l.startswith("mutable")][0]
1332 self.failUnless(mutable.startswith("mutable: Unhealthy: 9 shares (enc 3-of-10)"),
1334 self.failUnless(self._corrupt_share_line in lines, out)
1335 self.failUnless("good: Not Healthy: 9 shares (enc 3-of-10)"
1337 self.failUnless("done: 4 objects checked, 2 healthy, 2 unhealthy"
1339 d.addCallback(_check4)
# 6) --raw emits one JSON unit per object plus a stats unit.
1341 d.addCallback(lambda ign:
1342 self.do_cli("deep-check", "--raw",
1344 def _check5((rc, out, err)):
1345 self.failUnlessEqual(err, "")
1346 self.failUnlessEqual(rc, 0)
1347 lines = out.splitlines()
1348 units = [simplejson.loads(line) for line in lines]
1349 # root, small, good, mutable, stats
1350 self.failUnlessEqual(len(units), 4+1)
1351 d.addCallback(_check5)
# 7) --verify --repair fixes both unhealthy objects.
1353 d.addCallback(lambda ign:
1354 self.do_cli("deep-check",
1355 "--verbose", "--verify", "--repair",
1357 def _check6((rc, out, err)):
1358 self.failUnlessEqual(err, "")
1359 self.failUnlessEqual(rc, 0)
1360 lines = out.splitlines()
1361 self.failUnless("<root>: healthy" in lines, out)
1362 self.failUnless("small: healthy" in lines, out)
1363 self.failUnless("mutable: not healthy" in lines, out)
1364 self.failUnless(self._corrupt_share_line in lines, out)
1365 self.failUnless("good: not healthy" in lines, out)
1366 self.failUnless("done: 4 objects checked" in lines, out)
1367 self.failUnless(" pre-repair: 2 healthy, 2 unhealthy" in lines, out)
1368 self.failUnless(" 2 repairs attempted, 2 successful, 0 failed"
1370 self.failUnless(" post-repair: 4 healthy, 0 unhealthy" in lines,out)
1371 d.addCallback(_check6)
1373 # now add a subdir, and a file below that, then make the subdir
# unrecoverable by deleting all of its shares.
1376 d.addCallback(lambda ign:
1377 self.rootnode.create_empty_directory(u"subdir"))
1378 d.addCallback(_stash_uri, "subdir")
1379 d.addCallback(lambda fn:
1380 fn.add_file(u"subfile", upload.Data(DATA+"2", "")))
1381 d.addCallback(lambda ign:
1382 self.delete_shares_numbered(self.uris["subdir"],
1389 # root/subdir [unrecoverable: 0 shares]
# 8) manifest and deep-check over the unrecoverable subdir must fail
# with UnrecoverableFileError, emitted after any partial output.
1392 d.addCallback(lambda ign: self.do_cli("manifest", self.rooturi))
1393 def _manifest_failed((rc, out, err)):
1394 self.failIfEqual(rc, 0)
1395 self.failUnlessIn("ERROR: UnrecoverableFileError", err)
1396 # the fatal directory should still show up, as the last line
1397 self.failUnlessIn(" subdir\n", out)
1398 d.addCallback(_manifest_failed)
1400 d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
1401 def _deep_check_failed((rc, out, err)):
1402 self.failIfEqual(rc, 0)
1403 self.failUnlessIn("ERROR: UnrecoverableFileError", err)
1404 # we want to make sure that the error indication is the last
1405 # thing that gets emitted
1406 self.failIf("done:" in out, out)
1407 d.addCallback(_deep_check_failed)
1409 # this test is disabled until the deep-repair response to an
1410 # unrepairable directory is fixed. The failure-to-repair should not
1411 # throw an exception, but the failure-to-traverse that follows
1412 # should throw UnrecoverableFileError.
1414 #d.addCallback(lambda ign:
1415 # self.do_cli("deep-check", "--repair", self.rooturi))
1416 #def _deep_check_repair_failed((rc, out, err)):
1417 # self.failIfEqual(rc, 0)
1419 # self.failUnlessIn("ERROR: UnrecoverableFileError", err)
1420 # self.failIf("done:" in out, out)
1421 #d.addCallback(_deep_check_repair_failed)
1425 class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
# Tests for CLI error reporting when files are unrecoverable.
1426 def test_check(self):
# Upload an immutable file, delete all but one of its shares, and
# verify that "tahoe get"/"tahoe ls" report useful errors.
# NOTE(review): basedir is identical to Check.test_check's
# ("cli/Check/check"); it probably should be under "cli/Errors/" to
# avoid the two tests sharing a working directory — confirm.
1427 self.basedir = "cli/Check/check"
# NOTE(review): this listing is line-sampled; lines 1428 (grid
# setup), 1430-1431 (DATA), 1433 (`def _stash_bad(ur):` presumably),
# 1437, 1443 (the "err" continuation argument), and the method's tail
# beyond line 1444 are missing from this view.
1429 c0 = self.g.clients[0]
1432 d = c0.upload(upload.Data(DATA, convergence=""))
1434 self.uri_1share = ur.uri
1435 self.delete_shares_numbered(ur.uri, range(1,10))
1436 d.addCallback(_stash_bad)
# Fetching a file with only 1 of 3 needed shares must fail with an
# HTTP 410 and a NotEnoughSharesError explanation.
1438 d.addCallback(lambda ign: self.do_cli("get", self.uri_1share))
1439 def _check1((rc, out, err)):
1440 self.failIfEqual(rc, 0)
1441 self.failUnless("410 Gone" in err, err)
1442 self.failUnless("NotEnoughSharesError: 1 share found, but we need 3" in err,
1444 d.addCallback(_check1)