4 from twisted.trial import unittest
5 from cStringIO import StringIO
9 from allmydata.util import fileutil, hashutil
10 from allmydata import uri
12 # Test that the scripts can be imported -- although the actual tests of their functionality are
13 # done by invoking them in a subprocess.
14 from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp
15 _hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm, tahoe_cp]
17 from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases
19 from allmydata.scripts import cli, debug, runner, backupdb
20 from allmydata.test.common import SystemTestMixin
21 from allmydata.test.common_util import StallMixin
22 from twisted.internet import threads # CLI tests use deferToThread
23 from twisted.python import usage
25 class CLI(unittest.TestCase):
26 # this test case only looks at argument-processing and simple stuff.
    def test_options(self):
        """Exercise CLI option parsing: --node-directory, --node-url and
        dir-cap/alias resolution, including URL normalization (trailing
        slash added) and rejection of malformed --node-url values.

        NOTE(review): several original lines are not visible in this view
        (e.g. the construction of `o`, presumably a cli Options instance,
        and some argument lines) — comments only, code unchanged.
        """
        # Build a throwaway node directory with a node.url and a root dir cap.
        fileutil.rm_dir("cli/test_options")
        fileutil.make_dirs("cli/test_options")
        fileutil.make_dirs("cli/test_options/private")
        open("cli/test_options/node.url","w").write("http://localhost:8080/\n")
        filenode_uri = uri.WriteableSSKFileURI(writekey="\x00"*16,
                                               fingerprint="\x00"*32)
        private_uri = uri.NewDirectoryURI(filenode_uri).to_string()
        open("cli/test_options/private/root_dir.cap", "w").write(private_uri + "\n")
        # node.url and root_dir.cap should be picked up from the node dir.
        o.parseOptions(["--node-directory", "cli/test_options"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")
        # an explicit --node-url overrides the one from node.url
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://example.org:8111/"])
        self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
        self.failUnlessEqual(o.where, "")
        # NOTE(review): the argument line completing this call is missing
        # from this view (presumably a --dir-cap/"root" argument).
        o.parseOptions(["--node-directory", "cli/test_options",
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
        self.failUnlessEqual(o.where, "")
        # an explicit --dir-cap replaces the default alias entirely
        other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
                                                     fingerprint="\x11"*32)
        other_uri = uri.NewDirectoryURI(other_filenode_uri).to_string()
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "")
        # a trailing positional argument becomes o.where
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--dir-cap", other_uri, "subdir"])
        self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
        self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
        self.failUnlessEqual(o.where, "subdir")
        # a non-URL --node-url must be rejected
        self.failUnlessRaises(usage.UsageError,
                              ["--node-directory", "cli/test_options",
                               "--node-url", "NOT-A-URL"])
        # a --node-url with no trailing slash gets one appended
        o.parseOptions(["--node-directory", "cli/test_options",
                        "--node-url", "http://localhost:8080"])
        self.failUnlessEqual(o["node-url"], "http://localhost:8080/")
    def _dump_cap(self, *args):
        """Run 'tahoe debug dump-cap' in-process with *args* and return its
        captured stdout; fails the test if anything was written to stderr.

        NOTE(review): the trailing 'return output' line is not visible in
        this view.
        """
        config = debug.DumpCapOptions()
        config.stdout,config.stderr = StringIO(), StringIO()
        config.parseOptions(args)
        debug.dump_cap(config)
        # any stderr output means dump-cap failed
        self.failIf(config.stderr.getvalue())
        output = config.stdout.getvalue()
    def test_dump_cap_chk(self):
        """dump-cap on a CHK (immutable file) cap: check decoded key, UEB
        hash, size, k/N, and storage index; also verify-cap and a cap
        embedded in an HTTP URL.

        NOTE(review): the lines defining needed_shares/total_shares (and
        presumably size=1234, given the assertions below) are not visible
        in this view, nor are some _dump_cap argument continuation lines.
        """
        key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
        storage_index = hashutil.storage_index_hash(key)
        uri_extension_hash = hashutil.uri_extension_hash("stuff")
        u = uri.CHKFileURI(key=key,
                           uri_extension_hash=uri_extension_hash,
                           needed_shares=needed_shares,
                           total_shares=total_shares,
        output = self._dump_cap(u.to_string())
        self.failUnless("CHK File:" in output, output)
        self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
        # with a client secret, dump-cap can derive renewal secrets too
        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("client renewal secret: znxmki5zdibb5qlt46xbdvk2t55j7hibejq3i5ijyurkr6m6jkhq" in output, output)
        # verify caps contain no key, but the rest is decodable
        output = self._dump_cap(u.get_verify_cap().to_string())
        self.failIf("key: " in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
        # caps embedded in a webapi URL are extracted and dumped
        prefixed_u = "http://127.0.0.1/uri/%s" % urllib.quote(u.to_string())
        output = self._dump_cap(prefixed_u)
        self.failUnless("CHK File:" in output, output)
        self.failUnless("key: aaaqeayeaudaocajbifqydiob4" in output, output)
        self.failUnless("UEB hash: nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa" in output, output)
        self.failUnless("size: 1234" in output, output)
        self.failUnless("k/N: 25/100" in output, output)
        self.failUnless("storage index: hdis5iaveku6lnlaiccydyid7q" in output, output)
133 def test_dump_cap_lit(self):
134 u = uri.LiteralFileURI("this is some data")
135 output = self._dump_cap(u.to_string())
136 self.failUnless("Literal File URI:" in output, output)
137 self.failUnless("data: this is some data" in output, output)
    def test_dump_cap_ssk(self):
        """dump-cap on SSK (mutable file) caps: writeable, read-only, and
        verifier forms, plus secret-derivation via --client-secret,
        --client-dir and --nodeid.

        NOTE(review): several continuation lines passing u.to_string() to
        _dump_cap, the f.close() for the secret file, and the conversion of
        `u` to its read-only form are not visible in this view.
        """
        writekey = "\x01" * 16
        fingerprint = "\xfe" * 32
        u = uri.WriteableSSKFileURI(writekey, fingerprint)
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Writeable URI:" in output, output)
        self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
        # a client secret passed on the command line yields renewal secrets
        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        # the secret can also come from BASEDIR/private/secret
        fileutil.make_dirs("cli/test_dump_cap/private")
        f = open("cli/test_dump_cap/private/secret", "w")
        f.write("5s33nk3qpvnj2fw3z4mnm2y6fa\n")
        output = self._dump_cap("--client-dir", "cli/test_dump_cap",
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        # a bogus client dir means no secrets can be derived
        output = self._dump_cap("--client-dir", "cli/test_dump_cap_BOGUS",
        self.failIf("file renewal secret:" in output, output)
        # with only a nodeid we get the write-enabler but no renewal secret
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failIf("file renewal secret:" in output, output)
        # nodeid + client secret yields everything
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
        # NOTE(review): `u` is presumably rebound to its read-only form on a
        # line not visible here, given the assertion below.
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Read-only URI:" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
        u = u.get_verify_cap()
        output = self._dump_cap(u.to_string())
        self.failUnless("SSK Verifier URI:" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
    def test_dump_cap_directory(self):
        """dump-cap on directory caps (SSK wrapped in DIR2): writeable,
        read-only, and verifier forms, plus secret derivation.

        NOTE(review): some _dump_cap continuation lines and the rebinding
        of `u` to its read-only form are not visible in this view.
        """
        writekey = "\x01" * 16
        fingerprint = "\xfe" * 32
        u1 = uri.WriteableSSKFileURI(writekey, fingerprint)
        u = uri.NewDirectoryURI(u1)
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Writeable URI:" in output, output)
        self.failUnless("writekey: aeaqcaibaeaqcaibaeaqcaibae" in output,
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output,
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
        # with a client secret, renewal secrets can be derived
        output = self._dump_cap("--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        # nodeid alone yields the write-enabler but no renewal secret
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failIf("file renewal secret:" in output, output)
        # nodeid + client secret yields everything
        output = self._dump_cap("--nodeid", "tqc35esocrvejvg4mablt6aowg6tl43j",
                                "--client-secret", "5s33nk3qpvnj2fw3z4mnm2y6fa",
        self.failUnless("write_enabler: mgcavriox2wlb5eer26unwy5cw56elh3sjweffckkmivvsxtaknq" in output, output)
        self.failUnless("file renewal secret: arpszxzc2t6kb4okkg7sp765xgkni5z7caavj7lta73vmtymjlxq" in output, output)
        self.failUnless("lease renewal secret: 7pjtaumrb7znzkkbvekkmuwpqfjyfyamznfz4bwwvmh4nw33lorq" in output, output)
        # NOTE(review): `u` is presumably rebound to its read-only form on a
        # line not visible here, given the assertion below.
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Read-only URI:" in output, output)
        self.failUnless("readkey: nvgh5vj2ekzzkim5fgtb4gey5y" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
        u = u.get_verify_cap()
        output = self._dump_cap(u.to_string())
        self.failUnless("Directory Verifier URI:" in output, output)
        self.failUnless("storage index: nt4fwemuw7flestsezvo2eveke" in output, output)
        self.failUnless("fingerprint: 737p57x6737p57x6737p57x6737p57x6737p57x6737p57x6737a" in output, output)
    def _catalog_shares(self, *basedirs):
        """Run 'tahoe debug catalog-shares' in-process over *basedirs* and
        return the captured (stdout, stderr) pair.

        NOTE(review): the o.parseOptions(args) call and the trailing
        'return out, err' are not visible in this view.
        """
        o = debug.CatalogSharesOptions()
        o.stdout,o.stderr = StringIO(), StringIO()
        args = list(basedirs)
        debug.catalog_shares(o)
        out = o.stdout.getvalue()
        err = o.stderr.getvalue()
    def test_catalog_shares_error(self):
        """catalog-shares must survive corrupt shares and stray non-directory
        entries: it reports 'Error processing' on stderr but keeps going.
        """
        nodedir1 = "cli/test_catalog_shares/node1"
        sharedir = os.path.join(nodedir1, "storage", "shares", "mq", "mqfblse6m5a6dh45isu2cg7oji")
        fileutil.make_dirs(sharedir)
        f = open(os.path.join(sharedir, "8"), "wb")
        # a file where a bucket directory is expected
        open("cli/test_catalog_shares/node1/storage/shares/mq/not-a-dir", "wb").close()
        # write a bogus share that looks a little bit like CHK
        f.write("\x00\x00\x00\x01" + "\xff" * 200) # this triggers an assert
        # a second, empty node directory should be processed cleanly
        nodedir2 = "cli/test_catalog_shares/node2"
        fileutil.make_dirs(nodedir2)
        open("cli/test_catalog_shares/node1/storage/shares/not-a-dir", "wb").close()
        # now make sure that the 'catalog-shares' commands survives the error
        out, err = self._catalog_shares(nodedir1, nodedir2)
        self.failUnlessEqual(out, "", out)
        self.failUnless("Error processing " in err,
                        "didn't see 'error processing' in '%s'" % err)
        #self.failUnless(nodedir1 in err,
        #                "didn't see '%s' in '%s'" % (nodedir1, err))
        # windows mangles the path, and os.path.join isn't enough to make
        # up for it, so just look for individual strings
        self.failUnless("node1" in err,
                        "didn't see 'node1' in '%s'" % err)
        self.failUnless("mqfblse6m5a6dh45isu2cg7oji" in err,
                        "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)
    def do_cli(self, verb, *args, **kwargs):
        """Run one tahoe CLI command in-process via runner.runner, in a
        worker thread (CLI code is blocking); eventually yields
        (rc, stdout, stderr).

        NOTE(review): the 'nodeargs = [' list opener for the line below,
        and the callback that binds `rc` before the return, are not
        visible in this view.
        """
            "--node-directory", self.getdir("client0"),
        argv = [verb] + nodeargs + list(args)
        stdin = kwargs.get("stdin", "")
        stdout, stderr = StringIO(), StringIO()
        d = threads.deferToThread(runner.runner, argv, run_by_human=False,
                                  stdin=StringIO(stdin),
                                  stdout=stdout, stderr=stderr)
        return rc, stdout.getvalue(), stderr.getvalue()
291 class CreateAlias(SystemTestMixin, CLITestMixin, unittest.TestCase):
    def _test_webopen(self, args, expected_url):
        """Run 'tahoe webopen' with *args*, capturing the URL it would open
        (via the urls.append callback) and comparing it to expected_url.

        NOTE(review): the 'urls = []' initialization is not visible in
        this view.
        """
        woo = cli.WebopenOptions()
        all_args = ["--node-directory", self.getdir("client0")] + list(args)
        woo.parseOptions(all_args)
        rc = cli.webopen(woo, urls.append)
        self.failUnlessEqual(rc, 0)
        # exactly one URL should have been "opened"
        self.failUnlessEqual(len(urls), 1)
        self.failUnlessEqual(urls[0], expected_url)
    def test_create(self):
        """create-alias / add-alias behavior: creation, duplicate rejection,
        and the URLs that webopen derives from each alias form.

        NOTE(review): a few addCallback hookup lines and the trailing
        'return d' are not visible in this view.
        """
        self.basedir = os.path.dirname(self.mktemp())
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        def _done((rc,stdout,stderr)):
            self.failUnless("Alias 'tahoe' created" in stdout)
            aliases = get_aliases(self.getdir("client0"))
            self.failUnless("tahoe" in aliases)
            self.failUnless(aliases["tahoe"].startswith("URI:DIR2:"))
        d.addCallback(lambda res: self.do_cli("create-alias", "two"))
        def _stash_urls(res):
            # remember the webapi URLs each alias should map to
            aliases = get_aliases(self.getdir("client0"))
            node_url_file = os.path.join(self.getdir("client0"), "node.url")
            nodeurl = open(node_url_file, "r").read().strip()
            uribase = nodeurl + "uri/"
            self.tahoe_url = uribase + urllib.quote(aliases["tahoe"])
            self.tahoe_subdir_url = self.tahoe_url + "/subdir"
            self.two_url = uribase + urllib.quote(aliases["two"])
            self.two_uri = aliases["two"]
        d.addCallback(_stash_urls)
        d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
        def _check_create_duplicate((rc,stdout,stderr)):
            # duplicate create-alias must fail and leave the alias alone
            self.failIfEqual(rc, 0)
            self.failUnless("Alias 'two' already exists!" in stderr)
            aliases = get_aliases(self.getdir("client0"))
            self.failUnlessEqual(aliases["two"], self.two_uri)
        d.addCallback(_check_create_duplicate)
        d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
        def _check_add((rc,stdout,stderr)):
            self.failUnlessEqual(rc, 0)
            self.failUnless("Alias 'added' added" in stdout)
        d.addCallback(_check_add)
        # check add-alias with a duplicate
        d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
        def _check_add_duplicate((rc,stdout,stderr)):
            self.failIfEqual(rc, 0)
            self.failUnless("Alias 'two' already exists!" in stderr)
            aliases = get_aliases(self.getdir("client0"))
            self.failUnlessEqual(aliases["two"], self.two_uri)
        d.addCallback(_check_add_duplicate)
        def _test_urls(junk):
            # each alias spelling should map to the expected webapi URL
            self._test_webopen([], self.tahoe_url)
            self._test_webopen(["/"], self.tahoe_url)
            self._test_webopen(["tahoe:"], self.tahoe_url)
            self._test_webopen(["tahoe:/"], self.tahoe_url)
            self._test_webopen(["tahoe:subdir"], self.tahoe_subdir_url)
            self._test_webopen(["tahoe:subdir/"], self.tahoe_subdir_url + '/')
            self._test_webopen(["tahoe:subdir/file"], self.tahoe_subdir_url + '/file')
            # if "file" is indeed a file, then the url produced by webopen in
            # this case is disallowed by the webui. but by design, webopen
            # passes through the mistake from the user to the resultant
            self._test_webopen(["tahoe:subdir/file/"], self.tahoe_subdir_url + '/file/')
            self._test_webopen(["two:"], self.two_url)
        d.addCallback(_test_urls)
368 class Put(SystemTestMixin, CLITestMixin, unittest.TestCase):
    def test_unlinked_immutable_stdin(self):
        # tahoe get `echo DATA | tahoe put`
        # tahoe get `echo DATA | tahoe put -`
        """Upload unlinked immutable data from stdin, then fetch it back by
        the returned CHK readcap; '-' must behave identically.

        NOTE(review): the DATA definition, the 'def _uploaded(res):' line,
        and the trailing 'return d' are not visible in this view.
        """
        self.basedir = self.mktemp()
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("put", stdin=DATA))
            (rc, stdout, stderr) = res
            self.failUnless("waiting for file data on stdin.." in stderr)
            self.failUnless("200 OK" in stderr, stderr)
            self.readcap = stdout
            self.failUnless(self.readcap.startswith("URI:CHK:"))
        d.addCallback(_uploaded)
        d.addCallback(lambda res: self.do_cli("get", self.readcap))
        def _downloaded(res):
            (rc, stdout, stderr) = res
            self.failUnlessEqual(stderr, "")
            self.failUnlessEqual(stdout, DATA)
        d.addCallback(_downloaded)
        # putting the same data via "-" must yield the same readcap
        d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
    def test_unlinked_immutable_from_file(self):
        # tahoe put ./file.txt
        # tahoe put /tmp/file.txt
        # tahoe put ~/file.txt
        """Upload an unlinked file by relative, ./-prefixed, and absolute
        path; all must return the same LIT readcap.

        NOTE(review): the line binding `readcap` from stdout inside
        _uploaded, f.close(), and the trailing 'return d' are not visible
        in this view.
        """
        self.basedir = os.path.dirname(self.mktemp())
        # this will be "allmydata.test.test_cli/Put/test_put_from_file/RANDOM"
        # and the RANDOM directory will exist. Raw mktemp returns a filename.
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        # we make the file small enough to fit in a LIT file, for speed
        f = open(rel_fn, "w")
        f.write("short file")
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("put", rel_fn))
        def _uploaded((rc,stdout,stderr)):
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.readcap = readcap
        d.addCallback(_uploaded)
        d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        d.addCallback(lambda res: self.do_cli("put", abs_fn))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, self.readcap))
        # we just have to assume that ~ is handled properly
    def test_immutable_from_file(self):
        # tahoe put file.txt uploaded.txt
        # tahoe - uploaded.txt
        # tahoe put file.txt subdir/uploaded.txt
        # tahoe put file.txt tahoe:uploaded.txt
        # tahoe put file.txt tahoe:subdir/uploaded.txt
        # tahoe put file.txt DIRCAP:./uploaded.txt
        # tahoe put file.txt DIRCAP:./subdir/uploaded.txt
        """Upload linked immutable files under every supported target
        spelling (bare path, alias:path, DIRCAP:./path) and read each back.

        NOTE(review): the DATA definition, f.write/f.close lines, and the
        trailing 'return d' are not visible in this view.
        """
        self.basedir = os.path.dirname(self.mktemp())
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        # we make the file small enough to fit in a LIT file, for speed
        DATA2 = "short file two"
        f = open(rel_fn, "w")
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "uploaded.txt"))
        def _uploaded((rc,stdout,stderr)):
            # first upload creates the file: expect 201
            readcap = stdout.strip()
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.failUnless("201 Created" in stderr, stderr)
            self.readcap = readcap
        d.addCallback(_uploaded)
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))
        d.addCallback(lambda res:
                      self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
        def _replaced((rc,stdout,stderr)):
            # replacing an existing file: expect 200
            readcap = stdout.strip()
            self.failUnless(readcap.startswith("URI:LIT:"))
            self.failUnless("200 OK" in stderr, stderr)
        d.addCallback(_replaced)
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
        d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))
        def _get_dircap(res):
            # fetch the raw dircap so we can test DIRCAP:./path targets
            self.dircap = get_aliases(self.getdir("client0"))["tahoe"]
        d.addCallback(_get_dircap)
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn,
                                  self.dircap+":./uploaded5.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded5.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))
        d.addCallback(lambda res:
                      self.do_cli("put", rel_fn,
                                  self.dircap+":./subdir/uploaded6.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
        d.addCallback(lambda (rc,stdout,stderr):
                      self.failUnlessEqual(stdout, DATA))
    def test_mutable_unlinked(self):
        # FILECAP = `echo DATA | tahoe put --mutable`
        # tahoe get FILECAP, compare against DATA
        # echo DATA2 | tahoe put - FILECAP
        # tahoe get FILECAP, compare against DATA2
        # tahoe put file.txt FILECAP
        """Create an unlinked mutable file, then replace its contents twice
        (via stdin and via a local file); the SSK filecap stays the same.

        NOTE(review): DATA/DATA2 definitions, the 'def _created/_replaced/
        _replaced2' lines, f.write/f.close, and the trailing 'return d'
        are not visible in this view.
        """
        self.basedir = os.path.dirname(self.mktemp())
        rel_fn = os.path.join(self.basedir, "DATAFILE")
        abs_fn = os.path.abspath(rel_fn)
        DATA3 = "three" * 100
        f = open(rel_fn, "w")
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("put", "--mutable", stdin=DATA))
            (rc, stdout, stderr) = res
            self.failUnless("waiting for file data on stdin.." in stderr)
            self.failUnless("200 OK" in stderr)
            self.filecap = stdout
            self.failUnless(self.filecap.startswith("URI:SSK:"))
        d.addCallback(_created)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA))
        d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
            (rc, stdout, stderr) = res
            self.failUnless("waiting for file data on stdin.." in stderr)
            self.failUnless("200 OK" in stderr)
            # modify-in-place: the filecap must not change
            self.failUnlessEqual(self.filecap, stdout)
        d.addCallback(_replaced)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
        d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
            (rc, stdout, stderr) = res
            self.failUnless("200 OK" in stderr)
            self.failUnlessEqual(self.filecap, stdout)
        d.addCallback(_replaced2)
        d.addCallback(lambda res: self.do_cli("get", self.filecap))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA3))
    def test_mutable(self):
        # echo DATA1 | tahoe put --mutable - uploaded.txt
        # echo DATA2 | tahoe put - uploaded.txt # should modify-in-place
        # tahoe get uploaded.txt, compare against DATA2
        """A linked mutable file should be modified in place when a second
        put targets the same name.

        NOTE(review): DATA1/DATA2 definitions and the file-writing lines
        for fn1/fn2, plus the trailing 'return d', are not visible in
        this view.
        """
        self.basedir = os.path.dirname(self.mktemp())
        fn1 = os.path.join(self.basedir, "DATA1")
        fn2 = os.path.join(self.basedir, "DATA2")
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res:
                      self.do_cli("put", "--mutable", fn1, "tahoe:uploaded.txt"))
        d.addCallback(lambda res:
                      self.do_cli("put", fn2, "tahoe:uploaded.txt"))
        d.addCallback(lambda res:
                      self.do_cli("get", "tahoe:uploaded.txt"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
589 class Cp(SystemTestMixin, CLITestMixin, unittest.TestCase):
    def test_not_enough_args(self):
        """'tahoe cp' with a single argument must raise UsageError.

        NOTE(review): the construction of `o` (presumably cli.CpOptions())
        is not visible in this view.
        """
        self.failUnlessRaises(usage.UsageError,
                              o.parseOptions, ["onearg"])
    def test_unicode_filename(self):
        """cp of a file whose name contains a non-ASCII character, followed
        by a plain-ASCII file, then read both back. Marked .todo below.

        NOTE(review): some addCallback hookup lines and the trailing
        'return d' are not visible in this view.
        """
        self.basedir = os.path.dirname(self.mktemp())
        fn1 = os.path.join(self.basedir, "Ärtonwall")
        DATA1 = "unicode file content"
        open(fn1, "wb").write(DATA1)
        fn2 = os.path.join(self.basedir, "Metallica")
        DATA2 = "non-unicode file content"
        open(fn2, "wb").write(DATA2)
        # Assure that uploading a file whose name contains unicode character doesn't
        # prevent further uploads in the same directory
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res: self.do_cli("cp", fn1, "tahoe:"))
        d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:Ärtonwall"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA1))
        d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
        d.addCallback(lambda (rc,out,err): self.failUnlessEqual(out, DATA2))
    test_unicode_filename.todo = "This behavior is not yet supported, although it does happen to work (for reasons that are ill-understood) on many platforms. See issue ticket #534."
    def test_dangling_symlink_vs_recursion(self):
        """cp --recursive over a directory containing a dangling symlink
        must not assert.

        NOTE(review): the directory/symlink setup lines (making dn and the
        ln -> fn link), the rest of the cp --recursive argument list, and
        the trailing 'return d' are not visible in this view.
        """
        if not hasattr(os, 'symlink'):
            raise unittest.SkipTest("There is no symlink on this platform.")
        # cp -r on a directory containing a dangling symlink shouldn't assert
        self.basedir = os.path.dirname(self.mktemp())
        dn = os.path.join(self.basedir, "dir")
        fn = os.path.join(dn, "Fakebandica")
        ln = os.path.join(dn, "link")
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res: self.do_cli("cp", "--recursive",
640 class Backup(SystemTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
    def writeto(self, path, data):
        """Create (parents included) and write BASEDIR/home/<path>.

        NOTE(review): the f.write(data)/f.close() lines are not visible in
        this view.
        """
        d = os.path.dirname(os.path.join(self.basedir, "home", path))
        fileutil.make_dirs(d)
        f = open(os.path.join(self.basedir, "home", path), "w")
648 def count_output(self, out):
649 mo = re.search(r"(\d)+ files uploaded \((\d+) reused\), (\d+) directories created \((\d+) reused\)", out)
650 return [int(s) for s in mo.groups()]
652 def count_output2(self, out):
653 mo = re.search(r"(\d)+ files checked, (\d+) directories checked, (\d+) directories read", out)
654 return [int(s) for s in mo.groups()]
    def test_backup(self):
        """End-to-end 'tahoe backup': initial backup, full-reuse second
        backup, forced re-check via the backupdb, backup after local
        modifications (file<->dir swaps), and --no-backupdb re-upload.

        NOTE(review): many interior lines are missing from this view (the
        have_bdb continuation, the 'cmd = [...]' opener in do_backup, the
        'def _modify(res):' line, the expected_new computations, and the
        trailing 'return d', among others) — comments only, code
        unchanged.
        """
        self.basedir = os.path.dirname(self.mktemp())
        # is the backupdb available? If so, we test that a second backup does
        # not create new directories.
        have_bdb = backupdb.get_backupdb(os.path.join(self.basedir, "dbtest"),
        # create a small local directory with a couple of files
        source = os.path.join(self.basedir, "home")
        fileutil.make_dirs(os.path.join(source, "empty"))
        self.writeto("parent/subdir/foo.txt", "foo")
        self.writeto("parent/subdir/bar.txt", "bar\n" * 1000)
        self.writeto("parent/blah.txt", "blah")
        def do_backup(use_backupdb=True, verbose=False):
            # build and run a 'tahoe backup' command line
            if not have_bdb or not use_backupdb:
                cmd.append("--no-backupdb")
                cmd.append("--verbose")
            cmd.append("tahoe:backups")
            return self.do_cli(*cmd)
        d = self.set_up_nodes()
        d.addCallback(lambda res: self.do_cli("create-alias", "tahoe"))
        d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:backups"))
        def _should_complain((rc, out, err)):
            self.failUnless("I was unable to import a python sqlite library" in err, err)
        d.addCallback(_should_complain)
        d.addCallback(self.stall, 1.1) # make sure the backups get distinct timestamps
        d.addCallback(lambda res: do_backup())
        def _check0((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            fu, fr, dc, dr = self.count_output(out)
            # foo.txt, bar.txt, blah.txt
            self.failUnlessEqual(fu, 3)
            self.failUnlessEqual(fr, 0)
            # empty, home, home/parent, home/parent/subdir
            self.failUnlessEqual(dc, 4)
            self.failUnlessEqual(dr, 0)
        d.addCallback(_check0)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups"))
        def _check1((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(sorted(out.split()), ["Archives", "Latest"])
        d.addCallback(_check1)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
        def _check2((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(sorted(out.split()), ["empty", "parent"])
        d.addCallback(_check2)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
        def _check2a((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(out.strip(), "")
        d.addCallback(_check2a)
        d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
        def _check3((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(out, "foo")
        d.addCallback(_check3)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
        def _check4((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.old_archives = out.split()
            self.failUnlessEqual(len(self.old_archives), 1)
        d.addCallback(_check4)
        d.addCallback(self.stall, 1.1)
        d.addCallback(lambda res: do_backup())
        def _check4a((rc, out, err)):
            # second backup should reuse everything, if the backupdb is
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            fu, fr, dc, dr = self.count_output(out)
            # foo.txt, bar.txt, blah.txt
            self.failUnlessEqual(fu, 0)
            self.failUnlessEqual(fr, 3)
            # empty, home, home/parent, home/parent/subdir
            self.failUnlessEqual(dc, 0)
            self.failUnlessEqual(dr, 4)
        d.addCallback(_check4a)
        # sneak into the backupdb, crank back the "last checked"
        # timestamp to force a check on all files
        def _reset_last_checked(res):
            dbfile = os.path.join(self.basedir,
                                  "client0", "private", "backupdb.sqlite")
            self.failUnless(os.path.exists(dbfile), dbfile)
            bdb = backupdb.get_backupdb(dbfile)
            bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
            bdb.connection.commit()
        d.addCallback(_reset_last_checked)
        d.addCallback(self.stall, 1.1)
        d.addCallback(lambda res: do_backup(verbose=True))
        def _check4b((rc, out, err)):
            # we should check all files, and re-use all of them. None of
            # the directories should have been changed.
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            fu, fr, dc, dr = self.count_output(out)
            fchecked, dchecked, dread = self.count_output2(out)
            self.failUnlessEqual(fchecked, 3)
            self.failUnlessEqual(fu, 0)
            self.failUnlessEqual(fr, 3)
            # TODO: backupdb doesn't do dirs yet; when it does, this will
            # change to dchecked=4, and maybe dread=0
            self.failUnlessEqual(dchecked, 0)
            self.failUnlessEqual(dread, 4)
            self.failUnlessEqual(dc, 0)
            self.failUnlessEqual(dr, 4)
        d.addCallback(_check4b)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
        def _check5((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.new_archives = out.split()
            # NOTE(review): expected_new is computed on lines not visible here
            self.failUnlessEqual(len(self.new_archives), expected_new, out)
            # the original backup should still be the oldest (i.e. sorts
            # alphabetically towards the beginning)
            self.failUnlessEqual(sorted(self.new_archives)[0],
                                 self.old_archives[0])
        d.addCallback(_check5)
        d.addCallback(self.stall, 1.1)
            self.writeto("parent/subdir/foo.txt", "FOOF!")
            # and turn a file into a directory
            os.unlink(os.path.join(source, "parent/blah.txt"))
            os.mkdir(os.path.join(source, "parent/blah.txt"))
            self.writeto("parent/blah.txt/surprise file", "surprise")
            self.writeto("parent/blah.txt/surprisedir/subfile", "surprise")
            # turn a directory into a file
            os.rmdir(os.path.join(source, "empty"))
            self.writeto("empty", "imagine nothing being here")
        d.addCallback(_modify)
        def _check5a((rc, out, err)):
            # second backup should reuse bar.txt (if backupdb is available),
            # and upload the rest. None of the directories can be reused.
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            fu, fr, dc, dr = self.count_output(out)
            # new foo.txt, surprise file, subfile, empty
            self.failUnlessEqual(fu, 4)
            self.failUnlessEqual(fr, 1)
            # home, parent, subdir, blah.txt, surprisedir
            self.failUnlessEqual(dc, 5)
            self.failUnlessEqual(dr, 0)
        d.addCallback(_check5a)
        d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
        def _check6((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.new_archives = out.split()
            self.failUnlessEqual(len(self.new_archives), expected_new)
            self.failUnlessEqual(sorted(self.new_archives)[0],
                                 self.old_archives[0])
        d.addCallback(_check6)
        d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
        def _check7((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(out, "FOOF!")
            # the old snapshot should not be modified
            return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
        d.addCallback(_check7)
        def _check8((rc, out, err)):
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(out, "foo")
        d.addCallback(_check8)
        d.addCallback(self.stall, 1.1)
        d.addCallback(lambda res: do_backup(use_backupdb=False))
        def _check9((rc, out, err)):
            # --no-backupdb means re-upload everything. We still get to
            # re-use the directories, since nothing changed.
            self.failUnlessEqual(err, "")
            self.failUnlessEqual(rc, 0)
            fu, fr, dc, dr = self.count_output(out)
            self.failUnlessEqual(fu, 5)
            self.failUnlessEqual(fr, 0)
            self.failUnlessEqual(dc, 0)
            self.failUnlessEqual(dr, 5)
        d.addCallback(_check9)
    # on our old dapper buildslave, this test takes a long time (usually
    # 130s), so we have to bump up the default 120s timeout. The create-alias
    # and initial backup alone take 60s, probably because of the handful of
    # dirnodes being created (RSA key generation). The backup between check4
    # and check4a takes 6s, as does the backup before check4b.
    test_backup.timeout = 300