-import os
+import os, sys
+import twisted
from twisted.trial import unittest
from twisted.application import service
import allmydata
-from allmydata.node import OldConfigError
+import allmydata.frontends.magic_folder
+import allmydata.util.log
+
+from allmydata.node import Node, OldConfigError, OldConfigOptionError, MissingConfigEntry, UnescapedHashError
+from allmydata.frontends.auth import NeedRootcapLookupScheme
from allmydata import client
from allmydata.storage_client import StorageFarmBroker
+from allmydata.manhole import AuthorizedKeysManhole
from allmydata.util import base32, fileutil
from allmydata.interfaces import IFilesystemNode, IFileNode, \
IImmutableFileNode, IMutableFileNode, IDirectoryNode
from foolscap.api import flushEventualQueue
import allmydata.test.common_util as testutil
-import mock
BASECONFIG = ("[client]\n"
"introducer.furl = \n"
"introducer.furl = %s\n"
)
-class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
+class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.TestCase):
def test_loadable(self):
basedir = "test_client.Basic.test_loadable"
os.mkdir(basedir)
BASECONFIG)
client.Client(basedir)
- @mock.patch('twisted.python.log.msg')
- def test_error_on_old_config_files(self, mock_log_msg):
+    def test_comment(self):
+        # An introducer.furl value containing an unescaped '#' would be
+        # silently truncated by the config parser's inline-comment handling,
+        # so the client must reject it with UnescapedHashError instead.
+        # Escaped hashes (r"\#") are legal and must be accepted.
+        should_fail = [r"test#test", r"#testtest", r"test\\#test"]
+        should_not_fail = [r"test\#test", r"test\\\#test", r"testtest"]
+
+        basedir = "test_client.Basic.test_comment"
+        os.mkdir(basedir)
+
+        def write_config(s):
+            # Write a minimal tahoe.cfg whose introducer.furl is exactly s.
+            config = ("[client]\n"
+                      "introducer.furl = %s\n" % s)
+            fileutil.write(os.path.join(basedir, "tahoe.cfg"), config)
+
+        for s in should_fail:
+            self.failUnless(Node._contains_unescaped_hash(s))
+            write_config(s)
+            self.failUnlessRaises(UnescapedHashError, client.Client, basedir)
+
+        for s in should_not_fail:
+            self.failIf(Node._contains_unescaped_hash(s))
+            write_config(s)
+            client.Client(basedir)
+
+
+    def test_error_on_old_config_files(self):
+        # Pre-v1.3 single-purpose config files (e.g. 'introducer.furl'
+        # alongside tahoe.cfg) must make Client raise OldConfigError naming
+        # each offending file, and a warning must be logged for each one.
         basedir = "test_client.Basic.test_error_on_old_config_files"
         os.mkdir(basedir)
         fileutil.write(os.path.join(basedir, "tahoe.cfg"),
         fileutil.write(os.path.join(basedir, "readonly_storage"), "")
         fileutil.write(os.path.join(basedir, "debug_discard_storage"), "")
+        # Capture log output with TestCase.patch instead of mock, so the
+        # patch is undone automatically during test cleanup.
+        logged_messages = []
+        self.patch(twisted.python.log, 'msg', logged_messages.append)
+
         e = self.failUnlessRaises(OldConfigError, client.Client, basedir)
-        self.failUnlessIn(os.path.abspath(os.path.join(basedir, "introducer.furl")), e.args[0])
-        self.failUnlessIn(os.path.abspath(os.path.join(basedir, "no_storage")), e.args[0])
-        self.failUnlessIn(os.path.abspath(os.path.join(basedir, "readonly_storage")), e.args[0])
-        self.failUnlessIn(os.path.abspath(os.path.join(basedir, "debug_discard_storage")), e.args[0])
+        # The error message carries byte paths, so compare against the
+        # absolute basedir encoded with the filesystem encoding.
+        abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir)).encode(sys.getfilesystemencoding())
+        self.failUnlessIn(os.path.join(abs_basedir, "introducer.furl"), e.args[0])
+        self.failUnlessIn(os.path.join(abs_basedir, "no_storage"), e.args[0])
+        self.failUnlessIn(os.path.join(abs_basedir, "readonly_storage"), e.args[0])
+        self.failUnlessIn(os.path.join(abs_basedir, "debug_discard_storage"), e.args[0])
         for oldfile in ['introducer.furl', 'no_storage', 'readonly_storage',
                         'debug_discard_storage']:
-            logged = [ m for m in mock_log_msg.call_args_list if
-                       ("Found pre-Tahoe-LAFS-v1.3 configuration file" in str(m[0][0]) and oldfile in str(m[0][0])) ]
-            self.failUnless(logged, (oldfile, mock_log_msg.call_args_list))
+            # Each present old file must have produced a warning message.
+            logged = [ m for m in logged_messages if
+                       ("Found pre-Tahoe-LAFS-v1.3 configuration file" in str(m) and oldfile in str(m)) ]
+            self.failUnless(logged, (oldfile, logged_messages))
         for oldfile in [
             'nickname', 'webport', 'keepalive_timeout', 'log_gatherer.furl',
             'disconnect_timeout', 'advertised_ip_addresses', 'helper.furl',
             'key_generator.furl', 'stats_gatherer.furl', 'sizelimit',
             'run_helper']:
-            logged = [ m for m in mock_log_msg.call_args_list if
-                       ("Found pre-Tahoe-LAFS-v1.3 configuration file" in str(m[0][0]) and oldfile in str(m[0][0])) ]
-            self.failIf(logged, oldfile)
+            # Old files that were NOT created must not be warned about.
+            logged = [ m for m in logged_messages if
+                       ("Found pre-Tahoe-LAFS-v1.3 configuration file" in str(m) and oldfile in str(m)) ]
+            self.failIf(logged, (oldfile, logged_messages))
def test_secrets(self):
basedir = "test_client.Basic.test_secrets"
cancel_secret = c.get_cancel_secret()
self.failUnless(base32.b2a(cancel_secret))
+    def test_nodekey_yes_storage(self):
+        # With storage enabled (the default), the client must generate a
+        # node key and report a long node ID with the "v0-" prefix
+        # (presumably the node's public-key identifier — confirm format).
+        basedir = "test_client.Basic.test_nodekey_yes_storage"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       BASECONFIG)
+        c = client.Client(basedir)
+        self.failUnless(c.get_long_nodeid().startswith("v0-"))
+
+    def test_nodekey_no_storage(self):
+        # The node key must be generated even when storage is disabled;
+        # get_long_nodeid() still returns a "v0-"-prefixed ID.
+        basedir = "test_client.Basic.test_nodekey_no_storage"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       BASECONFIG + "[storage]\n" + "enabled = false\n")
+        c = client.Client(basedir)
+        self.failUnless(c.get_long_nodeid().startswith("v0-"))
+
def test_reserved_1(self):
basedir = "client.Basic.test_reserved_1"
os.mkdir(basedir)
"[storage]\n" + \
"enabled = true\n" + \
"reserved_space = bogus\n")
+ self.failUnlessRaises(ValueError, client.Client, basedir)
+
+    def test_web_staticdir(self):
+        # A relative web.static path in tahoe.cfg must be resolved against
+        # the node's absolute basedir before the webish service uses it.
+        basedir = u"client.Basic.test_web_staticdir"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       BASECONFIG +
+                       "[node]\n" +
+                       "web.port = tcp:0:interface=127.0.0.1\n" +
+                       "web.static = relative\n")
+        c = client.Client(basedir)
+        w = c.getServiceNamed("webish")
+        abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
+        expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
+        self.failUnlessReallyEqual(w.staticdir, expected)
+
+    def test_manhole_keyfile(self):
+        # A relative ssh.authorized_keys_file must likewise be resolved
+        # against the node's absolute basedir.
+        basedir = u"client.Basic.test_manhole_keyfile"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       BASECONFIG +
+                       "[node]\n" +
+                       "ssh.port = tcp:0:interface=127.0.0.1\n" +
+                       "ssh.authorized_keys_file = relative\n")
         c = client.Client(basedir)
-        self.failUnlessEqual(c.getServiceNamed("storage").reserved_space, 0)
+        # Locate the manhole among the client's child services.
+        m = [s for s in c if isinstance(s, AuthorizedKeysManhole)][0]
+        abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
+        expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
+        self.failUnlessReallyEqual(m.keyfile, expected)
+
+ # TODO: also test config options for SFTP.
+
+    def test_ftp_auth_keyfile(self):
+        # FTP frontend configured with an accounts.file: successful
+        # instantiation of the client is the whole test.
+        basedir = u"client.Basic.test_ftp_auth_keyfile"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       (BASECONFIG +
+                        "[ftpd]\n"
+                        "enabled = true\n"
+                        "port = tcp:0:interface=127.0.0.1\n"
+                        "accounts.file = private/accounts\n"))
+        os.mkdir(os.path.join(basedir, "private"))
+        fileutil.write(os.path.join(basedir, "private", "accounts"), "\n")
+        c = client.Client(basedir) # just make sure it can be instantiated
+        del c
+
+    def test_ftp_auth_url(self):
+        # FTP frontend configured with an accounts.url (instead of an
+        # accounts file) must also instantiate cleanly; the URL is not
+        # contacted during construction.
+        basedir = u"client.Basic.test_ftp_auth_url"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       (BASECONFIG +
+                        "[ftpd]\n"
+                        "enabled = true\n"
+                        "port = tcp:0:interface=127.0.0.1\n"
+                        "accounts.url = http://0.0.0.0/\n"))
+        c = client.Client(basedir) # just make sure it can be instantiated
+        del c
+
+    def test_ftp_auth_no_accountfile_or_url(self):
+        # An enabled FTP frontend with neither accounts.file nor
+        # accounts.url must be rejected with NeedRootcapLookupScheme.
+        basedir = u"client.Basic.test_ftp_auth_no_accountfile_or_url"
+        os.mkdir(basedir)
+        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                       (BASECONFIG +
+                        "[ftpd]\n"
+                        "enabled = true\n"
+                        "port = tcp:0:interface=127.0.0.1\n"))
+        self.failUnlessRaises(NeedRootcapLookupScheme, client.Client, basedir)
     def _permute(self, sb, key):
-        return [ s.get_serverid() for s in sb.get_servers_for_psi(key) ]
+        # Servers are now identified by longname rather than serverid;
+        # return them in the broker's permuted order for selection index `key`.
+        return [ s.get_longname() for s in sb.get_servers_for_psi(key) ]
     def test_permute(self):
-        sb = StorageFarmBroker(None, True)
+        sb = StorageFarmBroker(None, True, 0, None)
         for k in ["%d" % i for i in range(5)]:
-            sb.test_add_rref(k, "rref")
+            # test_add_rref now requires an announcement dict carrying the
+            # storage FURL and the base32 permutation seed for each server.
+            ann = {"anonymous-storage-FURL": "pb://abcde@nowhere/fake",
+                   "permutation-seed-base32": base32.b2a(k) }
+            sb.test_add_rref(k, "rref", ann)
         self.failUnlessReallyEqual(self._permute(sb, "one"), ['3','1','0','4','2'])
         self.failUnlessReallyEqual(self._permute(sb, "two"), ['0','4','2','1','3'])
         sb.servers.clear()
         self.failUnlessReallyEqual(self._permute(sb, "one"), [])
+    def test_permute_with_preferred(self):
+        # NOTE(review): test_permute constructs StorageFarmBroker with four
+        # positional arguments (None, True, 0, None) while this call passes
+        # the preferred-peer list as the third argument — the two calls look
+        # arity-inconsistent; confirm against the real StorageFarmBroker
+        # signature.
+        sb = StorageFarmBroker(None, True, ['1','4'])
+        for k in ["%d" % i for i in range(5)]:
+            ann = {"anonymous-storage-FURL": "pb://abcde@nowhere/fake",
+                   "permutation-seed-base32": base32.b2a(k) }
+            sb.test_add_rref(k, "rref", ann)
+
+        # Preferred servers '1' and '4' must sort to the front of the
+        # permuted list regardless of the permutation seed.
+        self.failUnlessReallyEqual(self._permute(sb, "one"), ['1','4','3','0','2'])
+        self.failUnlessReallyEqual(self._permute(sb, "two"), ['4','1','0','2','3'])
+        sb.servers.clear()
+        self.failUnlessReallyEqual(self._permute(sb, "one"), [])
+
def test_versions(self):
basedir = "test_client.Basic.test_versions"
os.mkdir(basedir)
self.failUnless("node.uptime" in stats)
self.failUnless(isinstance(stats["node.uptime"], float))
- @mock.patch('allmydata.util.log.msg')
- @mock.patch('allmydata.frontends.drop_upload.DropUploader')
- def test_create_drop_uploader(self, mock_drop_uploader, mock_log_msg):
- class MockDropUploader(service.MultiService):
- name = 'drop-upload'
+    def test_helper_furl(self):
+        # helper.furl handling: missing, empty, or the literal string
+        # "None" all mean "no helper"; any other value is passed through
+        # to the uploader verbatim.
         basedir = "test_client.Basic.test_helper_furl"
         os.mkdir(basedir)
-            def __init__(self, client, upload_dircap, local_dir_utf8, inotify=None):
+        def _check(config, expected_furl):
+            # Rewrite tahoe.cfg with the given extra config, rebuild the
+            # client, and compare the uploader's helper FURL.
+            fileutil.write(os.path.join(basedir, "tahoe.cfg"),
+                           BASECONFIG + config)
+            c = client.Client(basedir)
+            uploader = c.getServiceNamed("uploader")
+            furl, connected = uploader.get_helper_info()
+            self.failUnlessEqual(furl, expected_furl)
+
+        _check("", None)
+        _check("helper.furl =\n", None)
+        _check("helper.furl = \n", None)
+        _check("helper.furl = None", None)
+        _check("helper.furl = pb://blah\n", "pb://blah")
+
+    def test_create_magic_folder_service(self):
+        # Exercise [magic_folder] configuration handling: missing config
+        # entries, rejection of the old [drop_upload] section name, a
+        # successful construction, and propagation of constructor failures
+        # out of client.Client.
+        class MockMagicFolder(service.MultiService):
+            name = 'magic-folder'
+
+            def __init__(self, client, upload_dircap, collective_dircap, local_dir, dbfile, umask, inotify=None,
+                         pending_delay=1.0):
                 service.MultiService.__init__(self)
                 self.client = client
+                self._umask = umask
                 self.upload_dircap = upload_dircap
-                self.local_dir_utf8 = local_dir_utf8
+                self.collective_dircap = collective_dircap
+                self.local_dir = local_dir
+                self.dbfile = dbfile
                 self.inotify = inotify
-        mock_drop_uploader.side_effect = MockDropUploader
+            def ready(self):
+                pass
+
+        # Patch via TestCase.patch (auto-undone at cleanup) instead of mock.
+        self.patch(allmydata.frontends.magic_folder, 'MagicFolder', MockMagicFolder)
         upload_dircap = "URI:DIR2:blah"
-        local_dir_utf8 = u"loc\u0101l_dir".encode('utf-8')
+        # Fall back to an ASCII directory name on filesystems that cannot
+        # represent the non-ASCII one.
+        local_dir_u = self.unicode_or_fallback(u"loc\u0101l_dir", u"local_dir")
+        local_dir_utf8 = local_dir_u.encode('utf-8')
         config = (BASECONFIG +
                   "[storage]\n" +
                   "enabled = false\n" +
-                  "[drop_upload]\n" +
-                  "enabled = true\n" +
-                  "upload.dircap = " + upload_dircap + "\n" +
-                  "local.directory = " + local_dir_utf8 + "\n")
+                  "[magic_folder]\n" +
+                  "enabled = true\n")
-        basedir1 = "test_client.Basic.test_create_drop_uploader1"
+        basedir1 = "test_client.Basic.test_create_magic_folder_service1"
         os.mkdir(basedir1)
+
+        # local.directory alone is not enough: the private dircap files are
+        # still missing, so Client must raise MissingConfigEntry.
+        fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
+                       config + "local.directory = " + local_dir_utf8 + "\n")
+        self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)
+
         fileutil.write(os.path.join(basedir1, "tahoe.cfg"), config)
+        fileutil.write(os.path.join(basedir1, "private", "magic_folder_dircap"), "URI:DIR2:blah")
+        fileutil.write(os.path.join(basedir1, "private", "collective_dircap"), "URI:DIR2:meow")
+        # Dircap files present but local.directory missing: still an error.
+        self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)
+
+        # The obsolete [drop_upload] section name must be rejected.
+        fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
+                       config.replace("[magic_folder]\n", "[drop_upload]\n"))
+        self.failUnlessRaises(OldConfigOptionError, client.Client, basedir1)
+
+        fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
+                       config + "local.directory = " + local_dir_utf8 + "\n")
         c1 = client.Client(basedir1)
-        uploader = c1.getServiceNamed('drop-upload')
-        self.failUnless(isinstance(uploader, MockDropUploader), uploader)
-        self.failUnlessReallyEqual(uploader.client, c1)
-        self.failUnlessReallyEqual(uploader.upload_dircap, upload_dircap)
-        self.failUnlessReallyEqual(uploader.local_dir_utf8, local_dir_utf8)
-        self.failUnless(uploader.inotify is None, uploader.inotify)
-        self.failUnless(uploader.running)
+        magicfolder = c1.getServiceNamed('magic-folder')
+        self.failUnless(isinstance(magicfolder, MockMagicFolder), magicfolder)
+        self.failUnlessReallyEqual(magicfolder.client, c1)
+        self.failUnlessReallyEqual(magicfolder.upload_dircap, upload_dircap)
+        # Compare only the basename: local_dir is resolved to an absolute path.
+        self.failUnlessReallyEqual(os.path.basename(magicfolder.local_dir), local_dir_u)
+        self.failUnless(magicfolder.inotify is None, magicfolder.inotify)
+        self.failUnless(magicfolder.running)
         class Boom(Exception):
             pass
-        mock_drop_uploader.side_effect = Boom()
+        def BoomMagicFolder(client, upload_dircap, collective_dircap, local_dir, dbfile,
+                            inotify=None, pending_delay=1.0):
+            raise Boom()
+        self.patch(allmydata.frontends.magic_folder, 'MagicFolder', BoomMagicFolder)
-        basedir2 = "test_client.Basic.test_create_drop_uploader2"
+        basedir2 = "test_client.Basic.test_create_magic_folder_service2"
         os.mkdir(basedir2)
+        os.mkdir(os.path.join(basedir2, "private"))
         fileutil.write(os.path.join(basedir2, "tahoe.cfg"),
                        BASECONFIG +
-                       "[drop_upload]\n" +
-                       "enabled = true\n")
-        c2 = client.Client(basedir2)
-        self.failUnlessRaises(KeyError, c2.getServiceNamed, 'drop-upload')
-        self.failIf([True for arg in mock_log_msg.call_args_list if "Boom" in repr(arg)],
-                    mock_log_msg.call_args_list)
-        self.failUnless([True for arg in mock_log_msg.call_args_list if "upload.dircap or local.directory not specified" in repr(arg)],
-                        mock_log_msg.call_args_list)
-
-        basedir3 = "test_client.Basic.test_create_drop_uploader3"
-        os.mkdir(basedir3)
-        fileutil.write(os.path.join(basedir3, "tahoe.cfg"), config)
-        client.Client(basedir3)
-        self.failUnless([True for arg in mock_log_msg.call_args_list if "Boom" in repr(arg)],
-                        mock_log_msg.call_args_list)
+                       "[magic_folder]\n" +
+                       "enabled = true\n" +
+                       "local.directory = " + local_dir_utf8 + "\n")
+        fileutil.write(os.path.join(basedir2, "private", "magic_folder_dircap"), "URI:DIR2:blah")
+        fileutil.write(os.path.join(basedir2, "private", "collective_dircap"), "URI:DIR2:meow")
+        # A MagicFolder constructor failure must propagate out of Client.
+        self.failUnlessRaises(Boom, client.Client, basedir2)
def flush_but_dont_ignore(res):
os.mkdir(basedir)
dummy = "pb://wl74cyahejagspqgy4x5ukrvfnevlknt@127.0.0.1:58889/bogus"
fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy)
- fileutil.write(os.path.join(basedir, "suicide_prevention_hotline"), "")
+ fileutil.write(os.path.join(basedir, client.Client.EXIT_TRIGGER_FILE), "")
client.Client(basedir)
def test_reloadable(self):
d.addCallback(self.stall, delay=2.0)
def _restart(res):
# TODO: pause for slightly over one second, to let
- # Client._check_hotline poll the file once. That will exercise
+ # Client._check_exit_trigger poll the file once. That will exercise
# another few lines. Then add another test in which we don't
- # update the file at all, and watch to see the node shutdown. (to
- # do this, use a modified node which overrides Node.shutdown(),
- # also change _check_hotline to use it instead of a raw
+ # update the file at all, and watch to see the node shutdown.
+ # (To do this, use a modified node which overrides Node.shutdown(),
+ # also change _check_exit_trigger to use it instead of a raw
# reactor.stop, also instrument the shutdown event in an
- # attribute that we can check)
+ # attribute that we can check.)
c2 = client.Client(basedir)
c2.setServiceParent(self.sparent)
return c2.disownServiceParent()
self.failUnless(n.is_readonly())
self.failIf(n.is_mutable())
+ # Testing #1679. There was a bug that would occur when downloader was
+ # downloading the same readcap more than once concurrently, so the
+ # filenode object was cached, and there was a failure from one of the
+ # servers in one of the download attempts. No subsequent download
+ # attempt would attempt to use that server again, which would lead to
+ # the file being undownloadable until the gateway was restarted. The
+ # current fix for this (hopefully to be superceded by a better fix
+ # eventually) is to prevent re-use of filenodes, so the NodeMaker is
+ # hereby required *not* to cache and re-use filenodes for CHKs.
+ other_n = c.create_node_from_uri("URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
+ self.failIf(n is other_n, (n, other_n))
+
n = c.create_node_from_uri("URI:LIT:n5xgk")
self.failUnless(IFilesystemNode.providedBy(n))
self.failUnless(IFileNode.providedBy(n))