tahoe add-alias tahoe `tahoe mkdir`
-After thatm you can use "tahoe ls tahoe:" and "tahoe cp local.txt tahoe:",
+After that you can use "tahoe ls tahoe:" and "tahoe cp local.txt tahoe:",
and both will refer to the directory that you've just created.
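For example, to keep a separate alias for backups (the directory cap shown is a
hypothetical placeholder; yours will differ):

 tahoe add-alias backup URI:DIR2:...
 tahoe ls backup:
 tahoe put local.txt backup:local.txt

Aliases are stored one per line, as "name: cap" pairs, in the node's
private/aliases file.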
=== Command Syntax Summary ===
["node-url", "u", None,
"URL of the tahoe node to use, a URL like \"http://127.0.0.1:8123\". "
"This overrides the URL found in the --node-directory ."],
- ["dir-cap", "r", "root",
- "Which dirnode URI should be used as a root directory. The "
- "string 'root' is special, and means we should use the "
- "directory found in the 'root_dir.cap' file in the 'private' "
- "subdirectory of the --node-directory ."],
+ ["dir-cap", "r", None,
+ "Which dirnode URI should be used as the 'tahoe' alias."]
]
def postOptions(self):
node_url_file = os.path.join(self['node-directory'], "node.url")
self['node-url'] = open(node_url_file, "r").read().strip()
- rootdircap = None
- if self['dir-cap'] == 'root':
- uri_file = os.path.join(self['node-directory'], 'private', "root_dir.cap")
- try:
- rootdircap = open(uri_file, "r").read().strip()
- except EnvironmentError, le:
- raise usage.UsageError("\n"
- "If --dir-cap is absent or is 'root', then the node directory's 'private'\n"
- "subdirectory is required to contain a file named 'root_dir.cap' which is\n"
- "required to contain a dir cap, but when we tried to open that file we got:\n"
- "'%s'." % (le,))
- else:
- rootdircap = self['dir-cap']
+ aliases = self.get_aliases(self['node-directory'])
+ if self['dir-cap']:
+ aliases["tahoe"] = self['dir-cap']
+ self.aliases = aliases # maps alias name to dircap
+
+
+ def get_aliases(self, nodedir):
from allmydata import uri
+ aliases = {}
+ aliasfile = os.path.join(nodedir, "private", "aliases")
+ rootfile = os.path.join(nodedir, "private", "root_dir.cap")
+ try:
+ f = open(rootfile, "r")
+ rootcap = f.read().strip()
+ if rootcap:
+ aliases["tahoe"] = uri.from_string_dirnode(rootcap).to_string()
+ except EnvironmentError:
+ pass
try:
- parsed = uri.NewDirectoryURI.init_from_human_encoding(rootdircap)
- except:
- try:
- parsed = uri.ReadonlyNewDirectoryURI.init_from_human_encoding(rootdircap)
- except:
- if self['dir-cap'] == 'root':
- raise usage.UsageError("\n"
- "If --dir-cap is absent or is 'root', then the node directory's 'private'\n"
- "subdirectory's 'root_dir.cap' is required to contain a dir cap, but we found\n"
- "'%s'." % (rootdircap,))
- else:
- raise usage.UsageError("--dir-cap is required to be a dir cap (or \"root\"), but we got '%s'." % (self['dir-cap'],))
-
- self['dir-cap'] = parsed.to_string()
+ f = open(aliasfile, "r")
+ for line in f.readlines():
+ line = line.strip()
+ if line.startswith("#") or not line:
+ continue
+ name, cap = line.split(":", 1)
+ # TODO: also accept pasted webapi URLs here (strip the http: prefix, urldecode)
+ cap = cap.strip()
+ aliases[name] = uri.from_string_dirnode(cap).to_string()
+ except EnvironmentError:
+ pass
+ return aliases
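As a rough sketch of the file get_aliases() parses: private/aliases holds one
"name: cap" pair per line, '#' comment lines are skipped, and each cap is
normalized through the uri parser. A hypothetical example:

 # personal aliases
 backup: URI:DIR2:...
 work: URI:DIR2:...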
+
+class MakeDirectoryOptions(VDriveOptions):
+ def parseArgs(self, where=""):
+ self.where = where
+ longdesc = """Create a new directory, either unlinked or as a subdirectory."""
+
+class AddAliasOptions(VDriveOptions):
+ def parseArgs(self, alias, cap):
+ self.alias = alias
+ self.cap = cap
class ListOptions(VDriveOptions):
- def parseArgs(self, vdrive_pathname=""):
- self['vdrive_pathname'] = vdrive_pathname
+ optFlags = [
+ ("long", "l", "Use long format: show file sizes, and timestamps"),
+ ("uri", "u", "Show file URIs"),
+ ("classify", "F", "Append '/' to directory names, and '*' to mutable"),
+ ("json", None, "Show the raw JSON output"),
+ ]
+ def parseArgs(self, where=""):
+ self.where = where
longdesc = """List the contents of some portion of the virtual drive."""
class GetOptions(VDriveOptions):
- def parseArgs(self, vdrive_filename, local_filename="-"):
- self['vdrive_filename'] = vdrive_filename
- self['local_filename'] = local_filename
+ def parseArgs(self, arg1, arg2=None):
+ # tahoe get FOO |less # write to stdout
+ # tahoe get tahoe:FOO |less # same
+ # tahoe get FOO bar # write to local file
+ # tahoe get tahoe:FOO bar # same
+
+ self.from_file = arg1
+ self.to_file = arg2
+ if self.to_file == "-":
+ self.to_file = None
def getSynopsis(self):
return "%s get VDRIVE_FILE LOCAL_FILE" % (os.path.basename(sys.argv[0]),)
will be written to stdout."""
class PutOptions(VDriveOptions):
- def parseArgs(self, local_filename, vdrive_filename):
- self['local_filename'] = local_filename
- self['vdrive_filename'] = vdrive_filename
+ def parseArgs(self, arg1=None, arg2=None):
+ # cat FILE | tahoe put # create unlinked file from stdin
+ # cat FILE | tahoe put FOO # create tahoe:FOO from stdin
+ # cat FILE | tahoe put tahoe:FOO # same
+ # tahoe put bar FOO # copy local 'bar' to tahoe:FOO
+ # tahoe put bar tahoe:FOO # same
+
+ if arg1 is not None and arg2 is not None:
+ self.from_file = arg1
+ self.to_file = arg2
+ elif arg1 is not None and arg2 is None:
+ self.from_file = None
+ self.to_file = arg1
+ else:
+ self.from_file = arg1
+ self.to_file = arg2
+ if self.from_file == "-":
+ self.from_file = None
def getSynopsis(self):
return "%s put LOCAL_FILE VDRIVE_FILE" % (os.path.basename(sys.argv[0]),)
local file (it can't be stdin)."""
class RmOptions(VDriveOptions):
- def parseArgs(self, vdrive_pathname):
- self['vdrive_pathname'] = vdrive_pathname
+ def parseArgs(self, where):
+ self.where = where
def getSynopsis(self):
return "%s rm VE_FILE" % (os.path.basename(sys.argv[0]),)
class MvOptions(VDriveOptions):
def parseArgs(self, frompath, topath):
- self['from'] = frompath
- self['to'] = topath
+ self.from_file = frompath
+ self.to_file = topath
def getSynopsis(self):
return "%s mv FROM TO" % (os.path.basename(sys.argv[0]),)
pass
subCommands = [
+ ["mkdir", None, MakeDirectoryOptions, "Create a new directory"],
+ ["add-alias", None, AddAliasOptions, "Add a new alias cap"],
["ls", None, ListOptions, "List a directory"],
["get", None, GetOptions, "Retrieve a file from the virtual drive."],
["put", None, PutOptions, "Upload a file into the virtual drive."],
["repl", None, ReplOptions, "Open a python interpreter"],
]
+def mkdir(config, stdout, stderr):
+ from allmydata.scripts import tahoe_mkdir
+ rc = tahoe_mkdir.mkdir(config['node-url'],
+ config.aliases,
+ config.where,
+ stdout, stderr)
+ return rc
+
+def add_alias(config, stdout, stderr):
+ from allmydata.scripts import tahoe_add_alias
+ rc = tahoe_add_alias.add_alias(config['node-directory'],
+ config.alias,
+ config.cap,
+ stdout, stderr)
+ return rc
+
def list(config, stdout, stderr):
from allmydata.scripts import tahoe_ls
rc = tahoe_ls.list(config['node-url'],
- config['dir-cap'],
- config['vdrive_pathname'],
+ config.aliases,
+ config.where,
+ config,
stdout, stderr)
return rc
def get(config, stdout, stderr):
from allmydata.scripts import tahoe_get
- vdrive_filename = config['vdrive_filename']
- local_filename = config['local_filename']
rc = tahoe_get.get(config['node-url'],
- config['dir-cap'],
- vdrive_filename,
- local_filename,
+ config.aliases,
+ config.from_file,
+ config.to_file,
stdout, stderr)
if rc == 0:
- if local_filename is None or local_filename == "-":
+ if config.to_file is None:
# be quiet, since the file being written to stdout should be
# proof enough that it worked, unless the user is unlucky
# enough to have picked an empty file
pass
else:
print >>stderr, "%s retrieved and written to %s" % \
- (vdrive_filename, local_filename)
+ (config.from_file, config.to_file)
return rc
-def put(config, stdout, stderr):
+def put(config, stdout, stderr, stdin=sys.stdin):
from allmydata.scripts import tahoe_put
- vdrive_filename = config['vdrive_filename']
- local_filename = config['local_filename']
if config['quiet']:
verbosity = 0
else:
verbosity = 2
rc = tahoe_put.put(config['node-url'],
- config['dir-cap'],
- local_filename,
- vdrive_filename,
+ config.aliases,
+ config.from_file,
+ config.to_file,
verbosity,
- stdout, stderr)
+ stdin, stdout, stderr)
return rc
def rm(config, stdout, stderr):
from allmydata.scripts import tahoe_rm
- vdrive_pathname = config['vdrive_pathname']
if config['quiet']:
verbosity = 0
else:
verbosity = 2
rc = tahoe_rm.rm(config['node-url'],
- config['dir-cap'],
- vdrive_pathname,
+ config.aliases,
+ config.where,
verbosity,
stdout, stderr)
return rc
def mv(config, stdout, stderr):
from allmydata.scripts import tahoe_mv
- frompath = config['from']
- topath = config['to']
rc = tahoe_mv.mv(config['node-url'],
- config['dir-cap'],
- frompath,
- topath,
+ config.aliases,
+ config.from_file,
+ config.to_file,
stdout, stderr)
return rc
return code.interact()
dispatch = {
+ "mkdir": mkdir,
+ "add-alias": add_alias,
"ls": list,
"get": get,
"put": put,
-import os, sys
+import os, sys, urllib
from twisted.python import usage
if not self.basedirs:
raise usage.UsageError("--basedir must be provided")
+DEFAULT_ALIAS = "tahoe"
+def get_alias(aliases, path, default):
+ # transform "work:path/filename" into (aliases["work"], "path/filename")
+ # We special-case URI:
+ if path.startswith("URI:"):
+ # The only way to get a sub-path is to use URI:blah:./foo, and we
+ # strip out the :./ sequence.
+ sep = path.find(":./")
+ if sep != -1:
+ return path[:sep], path[sep+3:]
+ return path, ""
+ colon = path.find(":")
+ if colon == -1:
+ # no alias
+ return aliases[default], path
+ alias = path[:colon]
+ if "/" in alias:
+ # no alias, but there's a colon in a dirname/filename, like
+ # "foo/bar:7"
+ return aliases[default], path
+ return aliases[alias], path[colon+1:]
+
+def escape_path(path):
+ segments = path.split("/")
+ return "/".join([urllib.quote(s) for s in segments])
c.send(data)
return c.getresponse()
+
+def check_http_error(resp, stderr):
+ if resp.status < 200 or resp.status >= 300:
+ print >>stderr, "error %d during HTTP request" % resp.status
+ return 1
f = open(os.path.join(basedir, "webport"), "w")
f.write(config['webport'] + "\n")
f.close()
- # Create an empty root_dir.cap file, indicating that the node
- # should fill it with the URI after creating the directory.
from allmydata.util import fileutil
fileutil.make_dirs(os.path.join(basedir, "private"), 0700)
- open(os.path.join(basedir, "private", "root_dir.cap"), "w")
print >>out, "client created in %s" % basedir
print >>out, " please copy introducer.furl into the directory"
--- /dev/null
+
+import os.path
+from allmydata import uri
+
+def add_alias(nodedir, alias, cap, stdout, stderr):
+ aliasfile = os.path.join(nodedir, "private", "aliases")
+ cap = uri.from_string_dirnode(cap).to_string()
+ assert ":" not in alias
+ assert " " not in alias
+ # probably check for others..
+ f = open(aliasfile, "a")
+ f.write("%s: %s\n" % (alias, cap))
+ f.close()
+ print >>stdout, "Alias '%s' added" % (alias,)
+ return 0
+
import urllib
+from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path
+from allmydata.scripts.common_http import do_http
-def get(nodeurl, dir_uri, vdrive_fname, local_file, stdout, stderr):
+def get(nodeurl, aliases, from_file, to_file, stdout, stderr):
if nodeurl[-1] != "/":
nodeurl += "/"
- url = nodeurl + "uri/%s/" % urllib.quote(dir_uri)
- if vdrive_fname:
- url += urllib.quote(vdrive_fname)
+ rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
+ url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+ if path:
+ url += "/" + escape_path(path)
- if local_file is None or local_file == "-":
+ if to_file:
+ outf = open(to_file, "wb")
+ close_outf = True
+ else:
outf = stdout
close_outf = False
+
+ resp = do_http("GET", url)
+ if resp.status in (200, 201,):
+ while True:
+ data = resp.read(4096)
+ if not data:
+ break
+ outf.write(data)
+ rc = 0
else:
- outf = open(local_file, "wb")
- close_outf = True
- inf = urllib.urlopen(url)
- while True:
- data = inf.read(4096)
- if not data:
- break
- outf.write(data)
+ print >>stderr, "Error, got %s %s" % (resp.status, resp.reason)
+ print >>stderr, resp.read()
+ rc = 1
+
if close_outf:
outf.close()
- return 0
+ return rc
-import urllib
+import urllib, time
import simplejson
+from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path
-def list(nodeurl, dir_uri, vdrive_pathname, stdout, stderr):
- if nodeurl[-1] != "/":
+def list(nodeurl, aliases, where, config, stdout, stderr):
+ if not nodeurl.endswith("/"):
nodeurl += "/"
- url = nodeurl + "uri/%s/" % urllib.quote(dir_uri)
- if vdrive_pathname:
- url += urllib.quote(vdrive_pathname)
+ if where.endswith("/"):
+ where = where[:-1]
+ rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
+ url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+ if path:
+ # move where.endswith check here?
+ url += "/" + escape_path(path)
+ assert not url.endswith("/")
url += "?t=json"
data = urllib.urlopen(url).read()
+ if config['json']:
+ print >>stdout, data
+ return
+
parsed = simplejson.loads(data)
nodetype, d = parsed
+ children = {}
if nodetype == "dirnode":
- childnames = sorted(d['children'].keys())
- for name in childnames:
- child = d['children'][name]
- childtype = child[0]
- if childtype == "dirnode":
- print >>stdout, "%10s %s/" % ("", name)
- else:
- assert childtype == "filenode"
- size = child[1]['size']
- print >>stdout, "%10s %s" % (size, name)
+ children = d['children']
elif nodetype == "filenode":
- print >>stdout, "%10s %s" % (d['size'], vdrive_pathname)
+ childname = path.split("/")[-1]
+ children = {childname: d}
+ childnames = sorted(children.keys())
+ now = time.time()
+ for name in childnames:
+ child = children[name]
+ childtype = child[0]
+ ctime = child[1]["metadata"].get("ctime")
+ mtime = child[1]["metadata"].get("mtime")
+ if ctime:
+ # match the timestamp formatting that GNU 'ls' uses
+ if (now - ctime) > 6*30*24*60*60:
+ # old files
+ fmt = "%b %d %Y"
+ else:
+ fmt = "%b %d %H:%M"
+ ctime_s = time.strftime(fmt, time.localtime(ctime))
+ else:
+ ctime_s = "-"
+ if childtype == "dirnode":
+ t = "d---------"
+ size = "-"
+ classify = "/"
+ elif childtype == "filenode":
+ t = "----------"
+ size = child[1]['size']
+ classify = ""
+ if "rw_uri" in child[1]:
+ classify = "*" # mutable
+
+ uri = child[1].get("rw_uri", child[1].get("ro_uri", "-"))
+
+ line = []
+ if config["long"]:
+ line.append("%s %10s %12s" % (t, size, ctime_s))
+ if config["uri"]:
+ line.append(uri)
+ line.append(name)
+ if config["classify"]:
+ line[-1] += classify
+
+ print >>stdout, " ".join(line)
--- /dev/null
+
+import urllib
+from allmydata.scripts.common_http import do_http, check_http_error
+from allmydata.scripts.common import get_alias, DEFAULT_ALIAS
+
+def mkdir(nodeurl, aliases, where, stdout, stderr):
+ if not nodeurl.endswith("/"):
+ nodeurl += "/"
+ if where:
+ rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
+
+ if not where or not path:
+ # create a new unlinked directory
+ url = nodeurl + "uri?t=mkdir"
+ resp = do_http("POST", url)
+ rc = check_http_error(resp, stderr)
+ if rc:
+ return rc
+ new_uri = resp.read().strip()
+ # emit its write-cap
+ print >>stdout, new_uri
+ return 0
+
+ # create a new directory at the given location
+ if path.endswith("/"):
+ path = path[:-1]
+ # path (in argv) must be "/".join([s.encode("utf-8") for s in segments])
+ url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
+ urllib.quote(path))
+ resp = do_http("POST", url)
+ rc = check_http_error(resp, stderr)
+ if rc:
+ return rc
+ new_uri = resp.read().strip()
+ print >>stdout, new_uri
+ return 0
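In rough terms, the two branches of mkdir() above map to these webapi requests
(URLs illustrative; the dircap and path come from get_alias):

 POST /uri?t=mkdir # create an unlinked directory, returns its write-cap
 POST /uri/$DIRCAP/sub/dir?t=mkdir # create a directory at that path, returns its cap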
import re
import urllib
import simplejson
+from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path
from allmydata.scripts.common_http import do_http
-def mv(nodeurl, dir_uri, frompath, topath, stdout, stderr):
- frompath = urllib.quote(frompath)
- topath = urllib.quote(topath)
+def mv(nodeurl, aliases, from_file, to_file, stdout, stderr):
if nodeurl[-1] != "/":
nodeurl += "/"
- url = nodeurl + "uri/%s/" % urllib.quote(dir_uri)
- data = urllib.urlopen(url + frompath + "?t=json").read()
-
+ rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
+ from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+ if path:
+ from_url += "/" + escape_path(path)
+ # figure out the source cap
+ data = urllib.urlopen(from_url + "?t=json").read()
nodetype, attrs = simplejson.loads(data)
- uri = attrs.get("rw_uri") or attrs["ro_uri"]
- # simplejson always returns unicode, but we know that it's really just a
- # bytestring.
- uri = str(uri)
-
- put_url = url + topath + "?t=uri"
- resp = do_http("PUT", put_url, uri)
+ cap = attrs.get("rw_uri") or attrs["ro_uri"]
+ # simplejson always returns unicode, but we know that it's really just an
+ # ASCII file-cap.
+ cap = str(cap)
+
+ # now get the target
+ rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
+ to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+ if path:
+ to_url += "/" + escape_path(path)
+ if path.endswith("/"):
+ # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
+ pass # TODO
+ to_url += "?t=uri"
+
+ resp = do_http("PUT", to_url, cap)
status = resp.status
if not re.search(r'^2\d\d$', str(status)):
print >>stderr, "error, got %s %s" % (resp.status, resp.reason)
print >>stderr, resp.read()
# now remove the original
- resp = do_http("DELETE", url + frompath)
+ resp = do_http("DELETE", from_url)
if not re.search(r'^2\d\d$', str(status)):
print >>stderr, "error, got %s %s" % (resp.status, resp.reason)
print >>stderr, resp.read()
+from cStringIO import StringIO
import urllib
from allmydata.scripts.common_http import do_http
+from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path
-def put(nodeurl, dir_uri, local_fname, vdrive_fname, verbosity,
- stdout, stderr):
+def put(nodeurl, aliases, from_file, to_file, verbosity,
+ stdin, stdout, stderr):
"""
@param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose
"""
if nodeurl[-1] != "/":
nodeurl += "/"
- url = nodeurl + "uri/%s/" % urllib.quote(dir_uri)
- if vdrive_fname:
- url += urllib.quote(vdrive_fname)
+ if to_file:
+ rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
+ url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
+ if path:
+ url += escape_path(path)
+ else:
+ url = nodeurl + "uri"
+ if from_file:
+ infileobj = open(from_file, "rb")
+ else:
+ # do_http() can't use stdin directly: for one thing, we need a
+ # Content-Length field. So we currently must copy it.
+ if verbosity > 0:
+ print >>stderr, "waiting for file data on stdin.."
+ data = stdin.read()
+ infileobj = StringIO(data)
- infileobj = open(local_fname, "rb")
resp = do_http("PUT", url, infileobj)
if resp.status in (200, 201,):
- print >>stdout, "%s %s" % (resp.status, resp.reason)
+ print >>stderr, "%s %s" % (resp.status, resp.reason)
+ print >>stdout, resp.read()
return 0
print >>stderr, "error, got %s %s" % (resp.status, resp.reason)
import urllib
from allmydata.scripts.common_http import do_http
+from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path
-def rm(nodeurl, dir_uri, vdrive_pathname, verbosity, stdout, stderr):
+def rm(nodeurl, aliases, where, verbosity, stdout, stderr):
"""
@param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose
"""
if nodeurl[-1] != "/":
nodeurl += "/"
- url = nodeurl + "uri/%s/" % urllib.quote(dir_uri)
- if vdrive_pathname:
- url += urllib.quote(vdrive_pathname)
+ rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
+ assert path
+ url = nodeurl + "uri/%s" % urllib.quote(rootcap)
+ url += "/" + escape_path(path)
resp = do_http("DELETE", url)
# at least import the CLI scripts, even if we don't have any real tests for
# them yet.
from allmydata.scripts import tahoe_ls, tahoe_get, tahoe_put, tahoe_rm
+from allmydata.scripts.common import DEFAULT_ALIAS
_hush_pyflakes = [tahoe_ls, tahoe_get, tahoe_put, tahoe_rm]
from allmydata.scripts import cli, debug
+# this test case only looks at argument-processing and simple stuff.
+# test_system contains all the CLI tests that actually use a real node.
class CLI(unittest.TestCase):
def test_options(self):
o = cli.ListOptions()
o.parseOptions(["--node-directory", "cli/test_options"])
self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
- self.failUnlessEqual(o['dir-cap'], private_uri)
- self.failUnlessEqual(o['vdrive_pathname'], "")
+ self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
+ self.failUnlessEqual(o.where, "")
o = cli.ListOptions()
o.parseOptions(["--node-directory", "cli/test_options",
"--node-url", "http://example.org:8111/"])
self.failUnlessEqual(o['node-url'], "http://example.org:8111/")
- self.failUnlessEqual(o['dir-cap'], private_uri)
- self.failUnlessEqual(o['vdrive_pathname'], "")
+ self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], private_uri)
+ self.failUnlessEqual(o.where, "")
o = cli.ListOptions()
o.parseOptions(["--node-directory", "cli/test_options",
"--dir-cap", "root"])
self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
- self.failUnlessEqual(o['dir-cap'], private_uri)
- self.failUnlessEqual(o['vdrive_pathname'], "")
-
- o = cli.ListOptions()
- o.parseOptions(["--node-directory", "cli/test_options"])
- self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
- self.failUnlessEqual(o['vdrive_pathname'], "")
+ self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], "root")
+ self.failUnlessEqual(o.where, "")
o = cli.ListOptions()
other_filenode_uri = uri.WriteableSSKFileURI(writekey="\x11"*16,
o.parseOptions(["--node-directory", "cli/test_options",
"--dir-cap", other_uri])
self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
- self.failUnlessEqual(o['dir-cap'], other_uri)
- self.failUnlessEqual(o['vdrive_pathname'], "")
+ self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
+ self.failUnlessEqual(o.where, "")
o = cli.ListOptions()
o.parseOptions(["--node-directory", "cli/test_options",
"--dir-cap", other_uri, "subdir"])
self.failUnlessEqual(o['node-url'], "http://localhost:8080/")
- self.failUnlessEqual(o['dir-cap'], other_uri)
- self.failUnlessEqual(o['vdrive_pathname'], "subdir")
+ self.failUnlessEqual(o.aliases[DEFAULT_ALIAS], other_uri)
+ self.failUnlessEqual(o.where, "subdir")
def _dump_cap(self, *args):
out,err = StringIO(), StringIO()
from allmydata.introducer import IntroducerNode
from allmydata.util import deferredutil, fileutil, idlib, mathutil, testutil
from allmydata.util import log
-from allmydata.scripts import runner
+from allmydata.scripts import runner, cli
from allmydata.interfaces import IDirectoryNode, IFileNode, IFileURI
from allmydata.mutable.common import NotMutableError
from allmydata.mutable import layout as mutable_layout
s.setServiceParent(self.sparent)
return s
- def set_up_nodes(self, NUMCLIENTS=5, createprivdir=False):
+ def set_up_nodes(self, NUMCLIENTS=5):
self.numclients = NUMCLIENTS
- self.createprivdir = createprivdir
iv_dir = self.getdir("introducer")
if not os.path.isdir(iv_dir):
fileutil.make_dirs(iv_dir)
open(os.path.join(basedir, "webport"), "w").write("tcp:0:interface=127.0.0.1")
kgf = "%s\n" % (self.key_generator_furl,)
open(os.path.join(basedir, "key_generator.furl"), "w").write(kgf)
- if self.createprivdir:
- fileutil.make_dirs(os.path.join(basedir, "private"))
- open(os.path.join(basedir, "private", "root_dir.cap"), "w")
open(os.path.join(basedir, "introducer.furl"), "w").write(self.introducer_furl)
open(os.path.join(basedir, "stats_gatherer.furl"), "w").write(self.stats_gatherer_furl)
def test_vdrive(self):
self.basedir = "system/SystemTest/test_vdrive"
self.data = LARGE_DATA
- d = self.set_up_nodes(createprivdir=True)
+ d = self.set_up_nodes()
d.addCallback(self._test_introweb)
d.addCallback(self.log, "starting publish")
d.addCallback(self._do_publish1)
nodeargs = [
"--node-directory", client0_basedir,
- "--dir-cap", private_uri,
+ #"--dir-cap", private_uri,
]
public_nodeargs = [
"--node-url", self.webish_url,
d = defer.succeed(None)
- def _ls_root(res):
- argv = ["ls"] + nodeargs
- return self._run_cli(argv)
- d.addCallback(_ls_root)
+ # for compatibility with earlier versions, private/root_dir.cap is
+ # supposed to be treated as an alias named "tahoe:". Start by making
+ # sure that works, before we add other aliases.
+
+ root_file = os.path.join(client0_basedir, "private", "root_dir.cap")
+ f = open(root_file, "w")
+ f.write(private_uri)
+ f.close()
+
+ def run(ignored, verb, *args):
+ newargs = [verb] + nodeargs + list(args)
+ return self._run_cli(newargs)
+
+ def _check_ls((out,err), expected_children, unexpected_children=[]):
+ self.failUnlessEqual(err, "")
+ for s in expected_children:
+ self.failUnless(s in out, s)
+ for s in unexpected_children:
+ self.failIf(s in out, s)
+
def _check_ls_root((out,err)):
self.failUnless("personal" in out)
self.failUnless("s2-ro" in out)
self.failUnless("s2-rw" in out)
self.failUnlessEqual(err, "")
- d.addCallback(_check_ls_root)
-
- def _ls_subdir(res):
- argv = ["ls"] + nodeargs + ["personal"]
- return self._run_cli(argv)
- d.addCallback(_ls_subdir)
- def _check_ls_subdir((out,err)):
- self.failUnless("sekrit data" in out)
- self.failUnlessEqual(err, "")
- d.addCallback(_check_ls_subdir)
-
- def _ls_public_subdir(res):
- argv = ["ls"] + public_nodeargs + ["subdir1"]
- return self._run_cli(argv)
- d.addCallback(_ls_public_subdir)
- def _check_ls_public_subdir((out,err)):
- self.failUnless("subdir2" in out)
- self.failUnless("mydata567" in out)
- self.failUnlessEqual(err, "")
- d.addCallback(_check_ls_public_subdir)
-
- def _ls_file(res):
- argv = ["ls"] + public_nodeargs + ["subdir1/mydata567"]
- return self._run_cli(argv)
- d.addCallback(_ls_file)
- def _check_ls_file((out,err)):
- self.failUnlessEqual(out.strip(), "112 subdir1/mydata567")
+
+ # this should reference private_uri
+ d.addCallback(run, "ls")
+ d.addCallback(_check_ls, ["personal", "s2-ro", "s2-rw"])
+
+ # now that that's out of the way, remove root_dir.cap and work with
+ # new files
+ d.addCallback(lambda res: os.unlink(root_file))
+
+ d.addCallback(run, "mkdir")
+ def _got_dir( (out,err) ):
+ self.failUnless(uri.from_string_dirnode(out.strip()))
+ return out.strip()
+ d.addCallback(_got_dir)
+ d.addCallback(lambda newcap: run(None, "add-alias", "tahoe", newcap))
+ def _check_empty_dir((out,err)):
+ self.failUnlessEqual(out, "")
self.failUnlessEqual(err, "")
- d.addCallback(_check_ls_file)
+ d.addCallback(run, "ls")
+ d.addCallback(_check_empty_dir)
+
+ files = []
+ datas = []
+ for i in range(10):
+ fn = os.path.join(self.basedir, "file%d" % i)
+ files.append(fn)
+ data = "data to be uploaded: file%d\n" % i
+ datas.append(data)
+ open(fn,"w").write(data)
+
+ # test both forms of put: from a file, and from stdin
+ # tahoe put bar FOO
+ d.addCallback(run, "put", files[0], "tahoe-file0")
+ def _put_out((out,err)):
+ self.failUnless("URI:LIT:" in out, out)
+ self.failUnless("201 Created" in err, err)
+ uri0 = out.strip()
+ return run(None, "get", uri0)
+ d.addCallback(_put_out)
+ d.addCallback(lambda (out,err): self.failUnlessEqual(out, datas[0]))
+
+ d.addCallback(run, "put", files[1], "subdir/tahoe-file1")
+ # tahoe put bar tahoe:FOO
+ d.addCallback(run, "put", files[2], "tahoe:file2")
+
+ def _put_from_stdin(res, data, *args):
+ args = nodeargs + list(args)
+ o = cli.PutOptions()
+ o.parseOptions(args)
+ stdin = StringIO(data)
+ stdout, stderr = StringIO(), StringIO()
+ d = threads.deferToThread(cli.put, o,
+ stdout=stdout, stderr=stderr, stdin=stdin)
+ def _done(res):
+ return stdout.getvalue(), stderr.getvalue()
+ d.addCallback(_done)
+ return d
+
+ # tahoe put FOO
+ STDIN_DATA = "This is the file to upload from stdin."
+ d.addCallback(_put_from_stdin,
+ STDIN_DATA,
+ "tahoe-file-stdin")
+ # tahoe put tahoe:FOO
+ d.addCallback(_put_from_stdin,
+ "Other file from stdin.",
+ "tahoe:from-stdin")
+
+ d.addCallback(run, "ls")
+ d.addCallback(_check_ls, ["tahoe-file0", "file2", "subdir",
+ "tahoe-file-stdin", "from-stdin"])
+ d.addCallback(run, "ls", "subdir")
+ d.addCallback(_check_ls, ["tahoe-file1"])
+
+ # tahoe mkdir FOO
+ d.addCallback(run, "mkdir", "subdir2")
+ d.addCallback(run, "ls")
+ # TODO: extract the URI, set an alias with it
+ d.addCallback(_check_ls, ["subdir2"])
+
+ # tahoe get: (to stdout and to a file)
+ d.addCallback(run, "get", "tahoe-file0")
+ d.addCallback(lambda (out,err):
+ self.failUnlessEqual(out, "data to be uploaded: file0\n"))
+ d.addCallback(run, "get", "tahoe:subdir/tahoe-file1")
+ d.addCallback(lambda (out,err):
+ self.failUnlessEqual(out, "data to be uploaded: file1\n"))
+ outfile0 = os.path.join(self.basedir, "outfile0")
+ d.addCallback(run, "get", "file2", outfile0)
+ def _check_outfile0((out,err)):
+ data = open(outfile0,"rb").read()
+ self.failUnlessEqual(data, "data to be uploaded: file2\n")
+ d.addCallback(_check_outfile0)
+ outfile1 = os.path.join(self.basedir, "outfile1")
+ d.addCallback(run, "get", "tahoe:subdir/tahoe-file1", outfile1)
+ def _check_outfile1((out,err)):
+ data = open(outfile1,"rb").read()
+ self.failUnlessEqual(data, "data to be uploaded: file1\n")
+ d.addCallback(_check_outfile1)
+
+ d.addCallback(run, "rm", "tahoe-file0")
+ d.addCallback(run, "rm", "tahoe:file2")
+ d.addCallback(run, "ls")
+ d.addCallback(_check_ls, [], ["tahoe-file0", "file2"])
+
+ d.addCallback(run, "ls", "-l")
+ def _check_ls_l((out,err)):
+ lines = out.split("\n")
+ for l in lines:
+ if "tahoe-file-stdin" in l:
+ self.failUnless(" %d " % len(STDIN_DATA) in l)
+ d.addCallback(_check_ls_l)
+
+ d.addCallback(run, "mv", "tahoe-file-stdin", "tahoe-moved")
+ d.addCallback(run, "ls")
+ d.addCallback(_check_ls, ["tahoe-moved"], ["tahoe-file-stdin"])
# tahoe_ls doesn't currently handle the error correctly: it tries to
# JSON-parse a traceback.
## self.failUnlessEqual(err, "")
## d.addCallback(_check_ls_missing)
- def _put(res):
- tdir = self.getdir("cli_put")
- fileutil.make_dirs(tdir)
- fn = os.path.join(tdir, "upload_me")
- f = open(fn, "wb")
- f.write(TESTDATA)
- f.close()
- argv = ["put"] + nodeargs + [fn, "test_put/upload.txt"]
- return self._run_cli(argv)
- d.addCallback(_put)
- def _check_put((out,err)):
- self.failUnless("201 Created" in out, out)
- self.failUnlessEqual(err, "")
- d = self._private_node.get_child_at_path(u"test_put/upload.txt")
- d.addCallback(lambda filenode: filenode.download_to_data())
- def _check_put2(res):
- self.failUnlessEqual(res, TESTDATA)
- d.addCallback(_check_put2)
- return d
- d.addCallback(_check_put)
-
- def _get_to_stdout(res):
- argv = ["get"] + nodeargs + ["test_put/upload.txt"]
- return self._run_cli(argv)
- d.addCallback(_get_to_stdout)
- def _check_get_to_stdout((out,err)):
- self.failUnlessEqual(out, TESTDATA)
- self.failUnlessEqual(err, "")
- d.addCallback(_check_get_to_stdout)
-
- get_to_file_target = self.basedir + "/get.downfile"
- def _get_to_file(res):
- argv = ["get"] + nodeargs + ["test_put/upload.txt",
- get_to_file_target]
- return self._run_cli(argv)
- d.addCallback(_get_to_file)
- def _check_get_to_file((out,err)):
- data = open(get_to_file_target, "rb").read()
- self.failUnlessEqual(data, TESTDATA)
- self.failUnlessEqual(out, "")
- self.failUnlessEqual(err, "test_put/upload.txt retrieved and written to system/SystemTest/test_vdrive/get.downfile\n")
- d.addCallback(_check_get_to_file)
-
-
- def _mv(res):
- argv = ["mv"] + nodeargs + ["test_put/upload.txt",
- "test_put/moved.txt"]
- return self._run_cli(argv)
- d.addCallback(_mv)
- def _check_mv((out,err)):
- self.failUnless("OK" in out)
- self.failUnlessEqual(err, "")
- d = self.shouldFail2(KeyError, "test_cli._check_rm", "'upload.txt'", self._private_node.get_child_at_path, u"test_put/upload.txt")
-
- d.addCallback(lambda res:
- self._private_node.get_child_at_path(u"test_put/moved.txt"))
- d.addCallback(lambda filenode: filenode.download_to_data())
- def _check_mv2(res):
- self.failUnlessEqual(res, TESTDATA)
- d.addCallback(_check_mv2)
- return d
- d.addCallback(_check_mv)
-
- def _rm(res):
- argv = ["rm"] + nodeargs + ["test_put/moved.txt"]
- return self._run_cli(argv)
- d.addCallback(_rm)
- def _check_rm((out,err)):
- self.failUnless("200 OK" in out)
- self.failUnlessEqual(err, "")
- d = self.shouldFail2(KeyError, "test_cli._check_rm", "'moved.txt'", self._private_node.get_child_at_path, u"test_put/moved.txt")
- return d
- d.addCallback(_check_rm)
return d
def _run_cli(self, argv):
+ #print "CLI:", argv
stdout, stderr = StringIO(), StringIO()
d = threads.deferToThread(runner.runner, argv, run_by_human=False,
stdout=stdout, stderr=stderr)