4 from twisted.trial import unittest
5 from cStringIO import StringIO
9 from allmydata.util import fileutil, hashutil
10 from allmydata import uri
12 # Test that the scripts can be imported -- although the actual tests of their functionality are
13 # done by invoking them in a subprocess.
14 from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp
15 _hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp]
17 from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases
19 from allmydata.scripts import cli, debug, runner, backupdb
20 from allmydata.test.common import SystemTestMixin
21 from allmydata.test.common_util import StallMixin
22 from twisted.internet import threads # CLI tests use deferToThread
23 from twisted.python import usage
25 class CLI(unittest.TestCase):
26 # this test case only looks at argument-processing and simple stuff.
    def test_options(self):
        # Exercise CLI argument processing: node-directory discovery,
        # node.url reading, default-alias resolution, --dir-cap overrides,
        # and --node-url validation/normalization.
        # NOTE(review): the "o = cli.XxxOptions()" constructions that
        # normally precede each parseOptions() call are elided from this
        # view -- confirm against the full file.
        fileutil.rm_dir("cli/test_options")
        fileutil.make_dirs("cli/test_options")
        fileutil.make_dirs("cli/test_options/private")
        # the CLI reads the webapi URL from BASEDIR/node.url
        open("cli/test_options/node.url","w").write("http://localhost:8080/\n")
        filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16,
                                               fingerprint="\x00"*32)
        private_uri = uri.NewDirectoryURI(filenode_uri).to_string()
        # BASEDIR/private/root_dir.cap supplies the default ("tahoe:") alias
        open("cli/test_options/private/root_dir.cap", "w").write(private_uri + "\n")
        o.parseOptions(["--node-directory", "cli/test_options"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")

        # an explicit --node-url on the command line overrides node.url
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://example.org:8111/"])
        self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")

        # NOTE(review): the continuation line of this parseOptions() call
        # (presumably the '--dir-cap "root"' arguments, given the
        # assertion below) is elided from this view.
        o.parseOptions(["--node-directory", "cli/test_options",
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
        self.failUnlessEqual(o.where, "")

        # --dir-cap with a full writecap replaces the default alias
        other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
                                                     fingerprint="\x11"*32)
        other_uri = uri.NewDirectoryURI(other_filenode_uri).to_string()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "")

        # a trailing positional argument lands in o.where
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri, "subdir"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "subdir")

        # a malformed --node-url is rejected with a UsageError
        # NOTE(review): the 'o.parseOptions,' callable argument line is
        # elided from this view.
        self.failUnlessRaises(usage.UsageError,
                              ["--node-directory", "cli/test_options",
                               "--node-url", "NOT-A-URL"])
        # a --node-url lacking a trailing slash has one appended
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://localhost:8080"])
        self.failUnlessEqual(o["node-url"], "http://localhost:8080/")
    def _dump_cap(self, *args):
        """Invoke 'tahoe debug dump-cap' with *args*, asserting that
        nothing was written to stderr.

        Callers use the captured stdout text as the result; the
        'return output' statement is elided from this view -- confirm
        against the full file.
        """
        config = debug.DumpCapOptions()
        config.stdout,config.stderr = StringIO(), StringIO()
        config.parseOptions(args)
        debug.dump_cap(config)
        self.failIf(config.stderr.getvalue())
        output = config.stdout.getvalue()
93 def test_dump_cap_chk(self):
94 key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
95 storage_index = hashutil.storage_index_hash(key)
96 uri_extension_hash = hashutil.uri_extension_hash("stuff")
100 u = uri.CHKFileURI(key=key,
101 uri_extension_hash=uri_extension_hash,
102 needed_shares=needed_shares,
103 total_shares=total_shares,
105 output = self._dump_cap(u.to_string())
106 self.failUnless("CHK File:" in output, output)
107 self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
108 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
109 self.failUnless("size: 1234" in output, output)
110 self.failUnless("k/N: 25/100" in output, output)
111 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
113 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
115 self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output)
117 output = self._dump_cap(u.get_verify_cap().to_string())
118 self.failIf("key: " in output, output)
119 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
120 self.failUnless("size: 1234" in output, output)
121 self.failUnless("k/N: 25/100" in output, output)
122 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
124 prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string())
125 output = self._dump_cap(prefixed_u)
126 self.failUnless("CHK File:" in output, output)
127 self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
128 self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
129 self.failUnless("size: 1234" in output, output)
130 self.failUnless("k/N: 25/100" in output, output)
131 self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
133 def test_dump_cap_lit(self):
134 u = uri.LiteralFileURI("this is some data")
135 output = self._dump_cap(u.to_string())
136 self.failUnless("Literal File URI:" in output, output)
137 self.failUnless("data: this is some data" in output, output)
139 def test_dump_cap_ssk(self):
140 writekey = "\x01" * 16
141 fingerprint = "\xfe" * 32
142 u = uri.WriteableSSKFileURI(writekey, fingerprint)
144 output = self._dump_cap(u.to_string())
145 self.failUnless("SSK Writeable URI:" in output, output)
146 self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output, output)
147 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
148 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
149 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
151 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
153 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
155 fileutil.make_dirs("cli/test_dump_cap/private")
156 f = open("cli/test_dump_cap/private/secret", "w")
157 f.write("5s33nk3qpvnj2fw3z4mnm2y6fa\n")
159 output = self._dump_cap("--client-dir", "cli/test_dump_cap",
161 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
163 output = self._dump_cap("--client-dir", "cli/test_dump_cap_BOGUS",
165 self.failIf("file renewal secret:" in output, output)
167 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
169 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
170 self.failIf("file renewal secret:" in output, output)
172 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
173 "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
175 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
176 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
177 self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
180 output = self._dump_cap(u.to_string())
181 self.failUnless("SSK Read-only URI:" in output, output)
182 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
183 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
184 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
186 u = u.get_verify_cap()
187 output = self._dump_cap(u.to_string())
188 self.failUnless("SSK Verifier URI:" in output, output)
189 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
190 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
192 def test_dump_cap_directory(self):
193 writekey = "\x01" * 16
194 fingerprint = "\xfe" * 32
195 u1 = uri.WriteableSSKFileURI(writekey, fingerprint)
196 u = uri.NewDirectoryURI(u1)
198 output = self._dump_cap(u.to_string())
199 self.failUnless("Directory Writeable URI:" in output, output)
200 self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output,
202 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
203 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output,
205 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
207 output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
209 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
211 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
213 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
214 self.failIf("file renewal secret:" in output, output)
216 output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
217 "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
219 self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
220 self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
221 self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
224 output = self._dump_cap(u.to_string())
225 self.failUnless("Directory Read-only URI:" in output, output)
226 self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
227 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
228 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
230 u = u.get_verify_cap()
231 output = self._dump_cap(u.to_string())
232 self.failUnless("Directory Verifier URI:" in output, output)
233 self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
234 self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
    def _catalog_shares(self, *basedirs):
        """Run the 'catalog-shares' debug command over *basedirs*.

        Callers unpack the result as an (out, err) pair of captured
        stdout/stderr text; the o.parseOptions(args) call and the
        'return out, err' statement are elided from this view -- confirm
        against the full file.
        """
        o = debug.CatalogSharesOptions()
        o.stdout,o.stderr = StringIO(), StringIO()
        args = list(basedirs)
        debug.catalog_shares(o)
        out = o.stdout.getvalue()
        err = o.stderr.getvalue()
    def test_catalog_shares_error(self):
        # Create one storage dir holding a deliberately corrupt share
        # (plus stray non-directory entries), and verify that
        # catalog-shares reports the error on stderr but keeps going.
        nodedir1 = "cli/test_catalog_shares/node1"
        sharedir = os.path.join(nodedir1, "storage", "shares", "mq", "mqfblse6m5a6dh45isu2cg7oji")
        fileutil.make_dirs(sharedir)
        f = open(os.path.join(sharedir, "8"), "wb")
        open("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "wb").close()
        # write a bogus share that looks a little bit like CHK
        f.write("\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert
        # NOTE(review): the f.close() for the bogus share appears to be
        # elided from this view -- confirm against the full file.
        nodedir2 = "cli/test_catalog_shares/node2"
        fileutil.make_dirs(nodedir2)
        open("cli/test_catalog_shares/node1/storage/shares/not-a-dir", "wb").close()
        # now make sure that the 'catalog-shares' command survives the error
        out, err = self._catalog_shares(nodedir1, nodedir2)
        self.failUnlessEqual(out, "", out)
        self.failUnless("Error processing " in err,
                        "didn't see 'error processing' in '%s'" % err)
        #self.failUnless(nodedir1 in err,
        #                "didn't see '%s' in '%s'" % (nodedir1, err))
        # windows mangles the path, and os.path.join isn't enough to make
        # up for it, so just look for individual strings
        self.failUnless("node1" in err,
                        "didn't see 'node1' in '%s'" % err)
        self.failUnless("mqfblse6m5a6dh45isu2cg7oji" in err,
                        "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)
275 class Help(unittest.TestCase):
278 help = str(cli.GetOptions())
279 self.failUnless("get VDRIVE_FILE LOCAL_FILE" in help, help)
280 self.failUnless("% tahoe get FOO |less" in help, help)
283 help = str(cli.PutOptions())
284 self.failUnless("put LOCAL_FILE VDRIVE_FILE" in help, help)
285 self.failUnless("% cat FILE | tahoe put" in help, help)
288 help = str(cli.RmOptions())
289 self.failUnless("rm VDRIVE_FILE" in help, help)
292 help = str(cli.MvOptions())
293 self.failUnless("mv FROM TO" in help, help)
296 help = str(cli.LnOptions())
297 self.failUnless("ln FROM TO" in help, help)
299 def test_backup(self):
300 help = str(cli.BackupOptions())
301 self.failUnless("backup FROM ALIAS:TO" in help, help)
303 def test_webopen(self):
304 help = str(cli.WebopenOptions())
305 self.failUnless("webopen [ALIAS:PATH]" in help, help)
307 def test_manifest(self):
308 help = str(cli.ManifestOptions())
309 self.failUnless("manifest [ALIAS:PATH]" in help, help)
311 def test_stats(self):
312 help = str(cli.StatsOptions())
313 self.failUnless("stats [ALIAS:PATH]" in help, help)
315 def test_check(self):
316 help = str(cli.CheckOptions())
317 self.failUnless("check [ALIAS:PATH]" in help, help)
319 def test_deep_check(self):
320 help = str(cli.DeepCheckOptions())
321 self.failUnless("deep-check [ALIAS:PATH]" in help, help)
324 def do_cli(self, verb, *args, **kwargs):
326 "--node-directory", self.getdir("client0"),
328 argv = [verb] + nodeargs + list(args)
329 stdin = kwargs.get("stdin", "")
330 stdout, stderr = StringIO(), StringIO()
331 d = threads.deferToThread(runner.runner, argv, run_by_human=False,
332 stdin=StringIO(stdin),
333 stdout=stdout, stderr=stderr)
335 return rc, stdout.getvalue(), stderr.getvalue()
339 class CreateAlias(SystemTestMixin, CLITestMixin, unittest.TestCase):
341 def _test_webopen(self, args, expected_url):
342 woo = cli.WebopenOptions()
343 all_args = ["--node-directory", self.getdir("client0")] + list(args)
344 woo.parseOptions(all_args)
346 rc = cli.webopen(woo, urls.append)
347 self.failUnlessEqual(rc, 0)
348 self.failUnlessEqual(len(urls), 1)
349 self.failUnlessEqual(urls[0], expected_url)
351 def test_create(self):
352 self.basedir = os.path.dirname(self.mktemp())
353 d = self.set_up_nodes()
354 d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
355 def _done((rc,stdout,stderr)):
356 self.failUnless("Alias 'tahoe' created" in stdout)
358 aliases = get_aliases(self.getdir("client0"))
359 self.failUnless("tahoe" in aliases)
360 self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
362 d.addCallback(lambda res: self.do_cli("create-alias", "two"))
364 def _stash_urls(res):
365 aliases = get_aliases(self.getdir("client0"))
366 node_url_file = os.path.join(self.getdir("client0"), "node.url")
367 nodeurl = open(node_url_file, "r").read().strip()
368 uribase = nodeurl + "uri/"
369 self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
370 self.tahoe_subdir_url = self.tahoe_url + "/subdir"
371 self.two_url = uribase + urllib.quote(aliases["two"])
372 self.two_uri = aliases["two"]
373 d.addCallback(_stash_urls)
375 d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
376 def _check_create_duplicate((rc,stdout,stderr)):
377 self.failIfEqual(rc, 0)
378 self.failUnless("Alias 'two' already exists!" in stderr)
379 aliases = get_aliases(self.getdir("client0"))
380 self.failUnlessEqual(aliases["two"], self.two_uri)
381 d.addCallback(_check_create_duplicate)
383 d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
384 def _check_add((rc,stdout,stderr)):
385 self.failUnlessEqual(rc, 0)
386 self.failUnless("Alias 'added' added" in stdout)
387 d.addCallback(_check_add)
389 # check add-alias with a duplicate
390 d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
391 def _check_add_duplicate((rc,stdout,stderr)):
392 self.failIfEqual(rc, 0)
393 self.failUnless("Alias 'two' already exists!" in stderr)
394 aliases = get_aliases(self.getdir("client0"))
395 self.failUnlessEqual(aliases["two"], self.two_uri)
396 d.addCallback(_check_add_duplicate)
398 def _test_urls(junk):
399 self._test_webopen([], self.tahoe_url)
400 self._test_webopen(["/"], self.tahoe_url)
401 self._test_webopen(["tahoe:"], self.tahoe_url)
402 self._test_webopen(["tahoe:/"], self.tahoe_url)
403 self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
404 self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
405 self._test_webopen(["tahoe:subdir/file"], self.tahoe_subdir_url + '/file')
406 # if "file" is indeed a file, then the url produced by webopen in
407 # this case is disallowed by the webui. but by design, webopen
408 # passes through the mistake from the user to the resultant
410 self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
411 self._test_webopen(["two:"], self.two_url)
412 d.addCallback(_test_urls)
416 class Put(SystemTestMixin, CLITestMixin, unittest.TestCase):
418 def test_unlinked_immutable_stdin(self):
419 # tahoe get `echo DATA | tahoe put`
420 # tahoe get `echo DATA | tahoe put -`
422 self.basedir = self.mktemp()
424 d = self.set_up_nodes()
425 d.addCallback(lambda res: self.do_cli("put", stdin=DATA))
427 (rc, stdout, stderr) = res
428 self.failUnless("waiting for file data on stdin.." in stderr)
429 self.failUnless("200 OK" in stderr, stderr)
430 self.readcap = stdout
431 self.failUnless(self.readcap.startswith("URI:CHK:"))
432 d.addCallback(_uploaded)
433 d.addCallback(lambda res: self.do_cli("get", self.readcap))
434 def _downloaded(res):
435 (rc, stdout, stderr) = res
436 self.failUnlessEqual(stderr, "")
437 self.failUnlessEqual(stdout, DATA)
438 d.addCallback(_downloaded)
439 d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
440 d.addCallback(lambda (rc,stdout,stderr):
441 self.failUnlessEqual(stdout, self.readcap))
444 def test_unlinked_immutable_from_file(self):
446 # tahoe put ./file.txt
447 # tahoe put /tmp/file.txt
448 # tahoe put ~/file.txt
449 self.basedir = os.path.dirname(self.mktemp())
450 # this will be "allmydata.test.test_cli/Put/test_put_from_file/RANDOM"
451 # and the RANDOM directory will exist. Raw mktemp returns a filename.
453 rel_fn = os.path.join(self.basedir, "DATAFILE")
454 abs_fn = os.path.abspath(rel_fn)
455 # we make the file small enough to fit in a LIT file, for speed
456 f = open(rel_fn, "w")
457 f.write("short file")
459 d = self.set_up_nodes()
460 d.addCallback(lambda res: self.do_cli("put", rel_fn))
461 def _uploaded((rc,stdout,stderr)):
463 self.failUnless(readcap.startswith("URI:LIT:"))
464 self.readcap = readcap
465 d.addCallback(_uploaded)
466 d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
467 d.addCallback(lambda (rc,stdout,stderr):
468 self.failUnlessEqual(stdout, self.readcap))
469 d.addCallback(lambda res: self.do_cli("put", abs_fn))
470 d.addCallback(lambda (rc,stdout,stderr):
471 self.failUnlessEqual(stdout, self.readcap))
472 # we just have to assume that ~ is handled properly
475 def test_immutable_from_file(self):
476 # tahoe put file.txt uploaded.txt
477 # tahoe - uploaded.txt
478 # tahoe put file.txt subdir/uploaded.txt
479 # tahoe put file.txt tahoe:uploaded.txt
480 # tahoe put file.txt tahoe:subdir/uploaded.txt
481 # tahoe put file.txt DIRCAP:./uploaded.txt
482 # tahoe put file.txt DIRCAP:./subdir/uploaded.txt
483 self.basedir = os.path.dirname(self.mktemp())
485 rel_fn = os.path.join(self.basedir, "DATAFILE")
486 abs_fn = os.path.abspath(rel_fn)
487 # we make the file small enough to fit in a LIT file, for speed
489 DATA2 = "short file two"
490 f = open(rel_fn, "w")
494 d = self.set_up_nodes()
495 d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
497 d.addCallback(lambda res:
498 self.do_cli("put", rel_fn, "uploaded.txt"))
499 def _uploaded((rc,stdout,stderr)):
500 readcap = stdout.strip()
501 self.failUnless(readcap.startswith("URI:LIT:"))
502 self.failUnless("201 Created" in stderr, stderr)
503 self.readcap = readcap
504 d.addCallback(_uploaded)
505 d.addCallback(lambda res:
506 self.do_cli("get", "tahoe:uploaded.txt"))
507 d.addCallback(lambda (rc,stdout,stderr):
508 self.failUnlessEqual(stdout, DATA))
510 d.addCallback(lambda res:
511 self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
512 def _replaced((rc,stdout,stderr)):
513 readcap = stdout.strip()
514 self.failUnless(readcap.startswith("URI:LIT:"))
515 self.failUnless("200 OK" in stderr, stderr)
516 d.addCallback(_replaced)
518 d.addCallback(lambda res:
519 self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
520 d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
521 d.addCallback(lambda (rc,stdout,stderr):
522 self.failUnlessEqual(stdout, DATA))
524 d.addCallback(lambda res:
525 self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
526 d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
527 d.addCallback(lambda (rc,stdout,stderr):
528 self.failUnlessEqual(stdout, DATA))
530 d.addCallback(lambda res:
531 self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
532 d.addCallback(lambda res:
533 self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
534 d.addCallback(lambda (rc,stdout,stderr):
535 self.failUnlessEqual(stdout, DATA))
537 def _get_dircap(res):
538 self.dircap = get_aliases(self.getdir("client0"))["tahoe"]
539 d.addCallback(_get_dircap)
541 d.addCallback(lambda res:
542 self.do_cli("put", rel_fn,
543 self.dircap+":./uploaded5.txt"))
544 d.addCallback(lambda res:
545 self.do_cli("get", "tahoe:uploaded5.txt"))
546 d.addCallback(lambda (rc,stdout,stderr):
547 self.failUnlessEqual(stdout, DATA))
549 d.addCallback(lambda res:
550 self.do_cli("put", rel_fn,
551 self.dircap+":./subdir/uploaded6.txt"))
552 d.addCallback(lambda res:
553 self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
554 d.addCallback(lambda (rc,stdout,stderr):
555 self.failUnlessEqual(stdout, DATA))
559 def test_mutable_unlinked(self):
560 # FILECAP = `echo DATA | tahoe put --mutable`
561 # tahoe get FILECAP, compare against DATA
562 # echo DATA2 | tahoe put - FILECAP
563 # tahoe get FILECAP, compare against DATA2
564 # tahoe put file.txt FILECAP
565 self.basedir = os.path.dirname(self.mktemp())
568 rel_fn = os.path.join(self.basedir, "DATAFILE")
569 abs_fn = os.path.abspath(rel_fn)
570 DATA3 = "three" * 100
571 f = open(rel_fn, "w")
575 d = self.set_up_nodes()
577 d.addCallback(lambda res: self.do_cli("put", "--mutable", stdin=DATA))
579 (rc, stdout, stderr) = res
580 self.failUnless("waiting for file data on stdin.." in stderr)
581 self.failUnless("200 OK" in stderr)
582 self.filecap = stdout
583 self.failUnless(self.filecap.startswith("URI:SSK:"))
584 d.addCallback(_created)
585 d.addCallback(lambda res: self.do_cli("get", self.filecap))
586 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA))
588 d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
590 (rc, stdout, stderr) = res
591 self.failUnless("waiting for file data on stdin.." in stderr)
592 self.failUnless("200 OK" in stderr)
593 self.failUnlessEqual(self.filecap, stdout)
594 d.addCallback(_replaced)
595 d.addCallback(lambda res: self.do_cli("get", self.filecap))
596 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
598 d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
600 (rc, stdout, stderr) = res
601 self.failUnless("200 OK" in stderr)
602 self.failUnlessEqual(self.filecap, stdout)
603 d.addCallback(_replaced2)
604 d.addCallback(lambda res: self.do_cli("get", self.filecap))
605 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA3))
609 def test_mutable(self):
610 # echo DATA1 | tahoe put --mutable - uploaded.txt
611 # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
612 # tahoe get uploaded.txt, compare against DATA2
614 self.basedir = os.path.dirname(self.mktemp())
616 fn1 = os.path.join(self.basedir, "DATA1")
621 fn2 = os.path.join(self.basedir, "DATA2")
626 d = self.set_up_nodes()
627 d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
628 d.addCallback(lambda res:
629 self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
630 d.addCallback(lambda res:
631 self.do_cli("put", fn2, "tahoe:uploaded.txt"))
632 d.addCallback(lambda res:
633 self.do_cli("get", "tahoe:uploaded.txt"))
634 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
637 class Cp(SystemTestMixin, CLITestMixin, unittest.TestCase):
638 def test_not_enough_args(self):
640 self.failUnlessRaises(usage.UsageError,
641 o.parseOptions, ["onearg"])
643 def test_unicode_filename(self):
644 self.basedir = os.path.dirname(self.mktemp())
646 fn1 = os.path.join(self.basedir, "Ärtonwall")
647 DATA1 = "unicode file content"
648 open(fn1, "wb").write(DATA1)
650 fn2 = os.path.join(self.basedir, "Metallica")
651 DATA2 = "non-unicode file content"
652 open(fn2, "wb").write(DATA2)
655 # Assure that uploading a file whose name contains unicode character doesn't
656 # prevent further uploads in the same directory
657 d = self.set_up_nodes()
658 d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
659 d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
660 d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
662 d.addCallback(lambda res: self.do_cli("get", "tahoe:Ärtonwall"))
663 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA1))
665 d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
666 d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
669 test_unicode_filename.todo = "This behavior is not yet supported, although it does happen to work (for reasons that are ill-understood) on many platforms. See issue ticket #534."
671 def test_dangling_symlink_vs_recursion(self):
672 if not hasattr(os, 'symlink'):
673 raise unittest.SkipTest("There is no symlink on this platform.")
674 # cp -r on a directory containing a dangling symlink shouldn't assert
675 self.basedir = os.path.dirname(self.mktemp())
676 dn = os.path.join(self.basedir, "dir")
678 fn = os.path.join(dn, "Fakebandica")
679 ln = os.path.join(dn, "link")
682 d = self.set_up_nodes()
683 d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
684 d.addCallback(lambda res: self.do_cli("cp", "--recursive",
688 class Backup(SystemTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
689 def writeto(self, path, data):
690 d = os.path.dirname(os.path.join(self.basedir, "home", path))
691 fileutil.make_dirs(d)
692 f = open(os.path.join(self.basedir, "home", path), "w")
696 def count_output(self, out):
697 mo = re.search(r"(\d)+ files uploaded \((\d+) reused\), (\d+) directories created \((\d+) reused\)", out)
698 return [int(s) for s in mo.groups()]
700 def count_output2(self, out):
701 mo = re.search(r"(\d)+ files checked, (\d+) directories checked, (\d+) directories read", out)
702 return [int(s) for s in mo.groups()]
704 def test_backup(self):
705 self.basedir = os.path.dirname(self.mktemp())
707 # is the backupdb available? If so, we test that a second backup does
708 # not create new directories.
710 have_bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
713 # create a small local directory with a couple of files
714 source = os.path.join(self.basedir, "home")
715 fileutil.make_dirs(os.path.join(source, "empty"))
716 self.writeto("parent/subdir/foo.txt", "foo")
717 self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
718 self.writeto("parent/blah.txt", "blah")
720 def do_backup(use_backupdb=True, verbose=False):
722 if not have_bdb or not use_backupdb:
723 cmd.append("--no-backupdb")
725 cmd.append("--verbose")
727 cmd.append("tahoe:backups")
728 return self.do_cli(*cmd)
730 d = self.set_up_nodes()
731 d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
734 d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:backups"))
735 def _should_complain((rc, out, err)):
736 self.failUnless("I was unable to import a python sqlite library" in err, err)
737 d.addCallback(_should_complain)
738 d.addCallback(self.stall, 1.1) # make sure the backups get distinct timestamps
740 d.addCallback(lambda res: do_backup())
741 def _check0((rc, out, err)):
742 self.failUnlessEqual(err, "")
743 self.failUnlessEqual(rc, 0)
744 fu, fr, dc, dr = self.count_output(out)
745 # foo.txt, bar.txt, blah.txt
746 self.failUnlessEqual(fu, 3)
747 self.failUnlessEqual(fr, 0)
748 # empty, home, home/parent, home/parent/subdir
749 self.failUnlessEqual(dc, 4)
750 self.failUnlessEqual(dr, 0)
751 d.addCallback(_check0)
753 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups"))
754 def _check1((rc, out, err)):
755 self.failUnlessEqual(err, "")
756 self.failUnlessEqual(rc, 0)
757 self.failUnlessEqual(sorted(out.split()), ["Archives", "Latest"])
758 d.addCallback(_check1)
759 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
760 def _check2((rc, out, err)):
761 self.failUnlessEqual(err, "")
762 self.failUnlessEqual(rc, 0)
763 self.failUnlessEqual(sorted(out.split()), ["empty", "parent"])
764 d.addCallback(_check2)
765 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
766 def _check2a((rc, out, err)):
767 self.failUnlessEqual(err, "")
768 self.failUnlessEqual(rc, 0)
769 self.failUnlessEqual(out.strip(), "")
770 d.addCallback(_check2a)
771 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
772 def _check3((rc, out, err)):
773 self.failUnlessEqual(err, "")
774 self.failUnlessEqual(rc, 0)
775 self.failUnlessEqual(out, "foo")
776 d.addCallback(_check3)
777 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
778 def _check4((rc, out, err)):
779 self.failUnlessEqual(err, "")
780 self.failUnlessEqual(rc, 0)
781 self.old_archives = out.split()
782 self.failUnlessEqual(len(self.old_archives), 1)
783 d.addCallback(_check4)
786 d.addCallback(self.stall, 1.1)
787 d.addCallback(lambda res: do_backup())
788 def _check4a((rc, out, err)):
789 # second backup should reuse everything, if the backupdb is
791 self.failUnlessEqual(err, "")
792 self.failUnlessEqual(rc, 0)
794 fu, fr, dc, dr = self.count_output(out)
795 # foo.txt, bar.txt, blah.txt
796 self.failUnlessEqual(fu, 0)
797 self.failUnlessEqual(fr, 3)
798 # empty, home, home/parent, home/parent/subdir
799 self.failUnlessEqual(dc, 0)
800 self.failUnlessEqual(dr, 4)
801 d.addCallback(_check4a)
804 # sneak into the backupdb, crank back the "last checked"
805 # timestamp to force a check on all files
806 def _reset_last_checked(res):
807 dbfile = os.path.join(self.basedir,
808 "client0", "private", "backupdb.sqlite")
809 self.failUnless(os.path.exists(dbfile), dbfile)
810 bdb = backupdb.get_backupdb(dbfile)
811 bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
812 bdb.connection.commit()
814 d.addCallback(_reset_last_checked)
816 d.addCallback(self.stall, 1.1)
817 d.addCallback(lambda res: do_backup(verbose=True))
818 def _check4b((rc, out, err)):
819 # we should check all files, and re-use all of them. None of
820 # the directories should have been changed.
821 self.failUnlessEqual(err, "")
822 self.failUnlessEqual(rc, 0)
823 fu, fr, dc, dr = self.count_output(out)
824 fchecked, dchecked, dread = self.count_output2(out)
825 self.failUnlessEqual(fchecked, 3)
826 self.failUnlessEqual(fu, 0)
827 self.failUnlessEqual(fr, 3)
828 # TODO: backupdb doesn't do dirs yet; when it does, this will
829 # change to dchecked=4, and maybe dread=0
830 self.failUnlessEqual(dchecked, 0)
831 self.failUnlessEqual(dread, 4)
832 self.failUnlessEqual(dc, 0)
833 self.failUnlessEqual(dr, 4)
834 d.addCallback(_check4b)
836 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
837 def _check5((rc, out, err)):
838 self.failUnlessEqual(err, "")
839 self.failUnlessEqual(rc, 0)
840 self.new_archives = out.split()
844 self.failUnlessEqual(len(self.new_archives), expected_new, out)
845 # the original backup should still be the oldest (i.e. sorts
846 # alphabetically towards the beginning)
847 self.failUnlessEqual(sorted(self.new_archives)[0],
848 self.old_archives[0])
849 d.addCallback(_check5)
851 d.addCallback(self.stall, 1.1)
853 self.writeto("parent/subdir/foo.txt", "FOOF!")
854 # and turn a file into a directory
855 os.unlink(os.path.join(source, "parent/blah.txt"))
856 os.mkdir(os.path.join(source, "parent/blah.txt"))
857 self.writeto("parent/blah.txt/surprise file", "surprise")
858 self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
859 # turn a directory into a file
860 os.rmdir(os.path.join(source, "empty"))
861 self.writeto("empty", "imagine nothing being here")
863 d.addCallback(_modify)
864 def _check5a((rc, out, err)):
865 # second backup should reuse bar.txt (if backupdb is available),
866 # and upload the rest. None of the directories can be reused.
867 self.failUnlessEqual(err, "")
868 self.failUnlessEqual(rc, 0)
870 fu, fr, dc, dr = self.count_output(out)
871 # new foo.txt, surprise file, subfile, empty
872 self.failUnlessEqual(fu, 4)
874 self.failUnlessEqual(fr, 1)
875 # home, parent, subdir, blah.txt, surprisedir
876 self.failUnlessEqual(dc, 5)
877 self.failUnlessEqual(dr, 0)
878 d.addCallback(_check5a)
879 d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
880 def _check6((rc, out, err)):
881 self.failUnlessEqual(err, "")
882 self.failUnlessEqual(rc, 0)
883 self.new_archives = out.split()
887 self.failUnlessEqual(len(self.new_archives), expected_new)
888 self.failUnlessEqual(sorted(self.new_archives)[0],
889 self.old_archives[0])
890 d.addCallback(_check6)
891 d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
892 def _check7((rc, out, err)):
893 self.failUnlessEqual(err, "")
894 self.failUnlessEqual(rc, 0)
895 self.failUnlessEqual(out, "FOOF!")
896 # the old snapshot should not be modified
897 return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
898 d.addCallback(_check7)
899 def _check8((rc, out, err)):
900 self.failUnlessEqual(err, "")
901 self.failUnlessEqual(rc, 0)
902 self.failUnlessEqual(out, "foo")
903 d.addCallback(_check8)
905 d.addCallback(self.stall, 1.1)
906 d.addCallback(lambda res: do_backup(use_backupdb=False))
907 def _check9((rc, out, err)):
908 # --no-backupdb means re-upload everything. We still get to
909 # re-use the directories, since nothing changed.
910 self.failUnlessEqual(err, "")
911 self.failUnlessEqual(rc, 0)
912 fu, fr, dc, dr = self.count_output(out)
913 self.failUnlessEqual(fu, 5)
914 self.failUnlessEqual(fr, 0)
915 self.failUnlessEqual(dc, 0)
916 self.failUnlessEqual(dr, 5)
917 d.addCallback(_check9)
921 # on our old dapper buildslave, this test takes a long time (usually
922 # 130s), so we have to bump up the default 120s timeout. The create-alias
923 # and initial backup alone take 60s, probably because of the handful of
924 # dirnodes being created (RSA key generation). The backup between check4
925 # and check4a takes 6s, as does the backup before check4b.
926 test_backup.timeout = 300