WIP 2375.tests-non-ascii.4
author     Daira Hopwood <daira@jacaranda.org>
           Tue, 24 Feb 2015 17:38:19 +0000 (17:38 +0000)
committer  Daira Hopwood <daira@jacaranda.org>
           Tue, 31 Mar 2015 16:52:38 +0000 (17:52 +0100)
17 files changed:
src/allmydata/node.py
src/allmydata/scripts/cli.py
src/allmydata/scripts/debug.py
src/allmydata/scripts/runner.py
src/allmydata/storage/common.py
src/allmydata/storage/server.py
src/allmydata/test/no_network.py
src/allmydata/test/test_cli.py
src/allmydata/test/test_cli_backup.py
src/allmydata/test/test_cli_create_alias.py
src/allmydata/test/test_client.py
src/allmydata/test/test_deepcheck.py
src/allmydata/test/test_mutable.py
src/allmydata/test/test_no_network.py
src/allmydata/test/test_storage.py
src/allmydata/test/test_system.py
src/allmydata/test/test_util.py

diff --git a/src/allmydata/node.py b/src/allmydata/node.py
index 35ccb355e5c2feebf20b277775cbc24f1a82b5c1..516b0f22764847ae4116d94879b6a25d0de93fa9 100644
--- a/src/allmydata/node.py
+++ b/src/allmydata/node.py
@@ -11,7 +11,8 @@ from allmydata.util import log
 from allmydata.util import fileutil, iputil, observer
 from allmydata.util.assertutil import precondition, _assert
 from allmydata.util.fileutil import abspath_expanduser_unicode
-from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
+from allmydata.util.encodingutil import get_filesystem_encoding, quote_output, \
+    quote_local_unicode_path
 
 # Add our application versions to the data that Foolscap's LogPublisher
 # reports.
@@ -50,7 +51,7 @@ class OldConfigError(Exception):
         return ("Found pre-Tahoe-LAFS-v1.3 configuration file(s):\n"
                 "%s\n"
                 "See docs/historical/configuration.rst."
-                % "\n".join([quote_output(fname) for fname in self.args[0]]))
+                % "\n".join([quote_local_unicode_path(fname) for fname in self.args[0]]))
 
 class OldConfigOptionError(Exception):
     pass
@@ -74,8 +75,8 @@ class Node(service.MultiService):
         self.basedir = abspath_expanduser_unicode(unicode(basedir))
         self._portnumfile = os.path.join(self.basedir, self.PORTNUMFILE)
         self._tub_ready_observerlist = observer.OneShotObserverList()
-        fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
-        open(os.path.join(self.basedir, "private", "README"), "w").write(PRIV_README)
+        fileutil.make_dirs(os.path.join(self.basedir, u"private"), 0700)
+        open(os.path.join(self.basedir, u"private", u"README"), "w").write(PRIV_README)
 
         # creates self.config
         self.read_config()
@@ -143,7 +144,7 @@ class Node(service.MultiService):
         self.error_about_old_config_files()
         self.config = ConfigParser.SafeConfigParser()
 
-        tahoe_cfg = os.path.join(self.basedir, "tahoe.cfg")
+        tahoe_cfg = os.path.join(self.basedir, u"tahoe.cfg")
         try:
             f = open(tahoe_cfg, "rb")
             try:
@@ -181,7 +182,7 @@ class Node(service.MultiService):
             'no_storage', 'readonly_storage', 'sizelimit',
             'debug_discard_storage', 'run_helper']:
             if name not in self.GENERATED_FILES:
-                fullfname = os.path.join(self.basedir, name)
+                fullfname = os.path.join(self.basedir, unicode(name))
                 if os.path.exists(fullfname):
                     oldfnames.add(fullfname)
         if oldfnames:
@@ -190,7 +191,7 @@ class Node(service.MultiService):
             raise e
 
     def create_tub(self):
-        certfile = os.path.join(self.basedir, "private", self.CERTFILE)
+        certfile = os.path.join(self.basedir, u"private", self.CERTFILE)
         self.tub = Tub(certFile=certfile)
         self.tub.setOption("logLocalFailures", True)
         self.tub.setOption("logRemoteFailures", True)
@@ -248,7 +249,7 @@ class Node(service.MultiService):
         config file that resides within the subdirectory named 'private'), and
         return it.
         """
-        privname = os.path.join(self.basedir, "private", name)
+        privname = os.path.join(self.basedir, u"private", name)
         open(privname, "w").write(value)
 
     def get_private_config(self, name, default=_None):
@@ -257,7 +258,7 @@ class Node(service.MultiService):
         and return it. Return a default, or raise an error if one was not
         given.
         """
-        privname = os.path.join(self.basedir, "private", name)
+        privname = os.path.join(self.basedir, u"private", name)
         try:
             return fileutil.read(privname)
         except EnvironmentError:
@@ -280,7 +281,7 @@ class Node(service.MultiService):
         If 'default' is a string, use it as a default value. If not, treat it
         as a zero-argument callable that is expected to return a string.
         """
-        privname = os.path.join(self.basedir, "private", name)
+        privname = os.path.join(self.basedir, u"private", name)
         try:
             value = fileutil.read(privname)
         except EnvironmentError:
@@ -373,16 +374,16 @@ class Node(service.MultiService):
                     ob.formatTime = newmeth
         # TODO: twisted >2.5.0 offers maxRotatedFiles=50
 
-        lgfurl_file = os.path.join(self.basedir, "private", "logport.furl").encode(get_filesystem_encoding())
-        self.tub.setOption("logport-furlfile", lgfurl_file)
+        logport_furl_file = os.path.join(self.basedir, u"private", u"logport.furl")
+        self.tub.setOption("logport-furlfile", logport_furl_file.encode(get_filesystem_encoding()))
         lgfurl = self.get_config("node", "log_gatherer.furl", "")
         if lgfurl:
             # this is in addition to the contents of log-gatherer-furlfile
             self.tub.setOption("log-gatherer-furl", lgfurl)
-        self.tub.setOption("log-gatherer-furlfile",
-                           os.path.join(self.basedir, "log_gatherer.furl"))
+        log_gatherer_furl_file = os.path.join(self.basedir, u"log_gatherer.furl")
+        self.tub.setOption("log-gatherer-furlfile",log_gatherer_furl_file.encode(get_filesystem_encoding()))
         self.tub.setOption("bridge-twisted-logs", True)
-        incident_dir = os.path.join(self.basedir, "logs", "incidents")
+        incident_dir = os.path.join(self.basedir, u"logs", u"incidents")
         foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding()))
 
     def log(self, *args, **kwargs):
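
The node.py changes above lean on a Python 2 behavior: os.path.join() returns unicode as soon as any component is unicode, so switching the literal components to u"..." promotes every derived path, while byte-oriented consumers such as Foolscap's furlfile options get an explicit encode. A minimal sketch of that behavior (stdlib only; the paths are made up, and Tahoe's get_filesystem_encoding papers over the platform quirks handled crudely here):

    import os, sys

    basedir = u"/home/user/.tahoe"                       # hypothetical basedir
    p = os.path.join(basedir, u"private", u"logport.furl")
    assert isinstance(p, unicode)                        # join promoted to unicode

    # Byte-string APIs still need an explicit encode:
    p_bytes = p.encode(sys.getfilesystemencoding() or "utf-8")
    assert isinstance(p_bytes, str)

    # The failure mode the u"..." literals avoid: joining unicode with
    # non-ASCII *bytes* triggers an implicit ASCII decode and blows up.
    try:
        os.path.join(u"/tmp", "caf\xc3\xa9")             # UTF-8 bytes, not unicode
    except UnicodeDecodeError:
        pass
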
diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py
index e240c9ecd1682af1a8e1b1823884f5fe4f864578..1fb194e4ce58a2dba0145fd361d5d79b83f60358 100644
--- a/src/allmydata/scripts/cli.py
+++ b/src/allmydata/scripts/cli.py
@@ -1,8 +1,9 @@
 import os.path, re, fnmatch
 from twisted.python import usage
 from allmydata.scripts.common import get_aliases, get_default_nodedir, \
-     DEFAULT_ALIAS, BaseOptions
-from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_local_unicode_path
+    DEFAULT_ALIAS, BaseOptions
+from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_output, \
+    quote_local_unicode_path
 
 NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")
 
@@ -528,7 +529,7 @@ def get(options):
             pass
         else:
             print >>options.stderr, "%s retrieved and written to %s" % \
-                  (options.from_file, options.to_file)
+                  (quote_output(options.from_file), quote_local_unicode_path(options.to_file))
     return rc
 
 def put(options):
diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py
index fedd69851e312c28ca6b871d74fce84a4a39c198..1ba8e7ad64e52db78c8cbb6ccea041abe7c7953c 100644
--- a/src/allmydata/scripts/debug.py
+++ b/src/allmydata/scripts/debug.py
@@ -37,12 +37,12 @@ verify-cap for the file that uses the share.
 
 def dump_share(options):
     from allmydata.storage.mutable import MutableShareFile
-    from allmydata.util.encodingutil import quote_output
+    from allmydata.util.encodingutil import quote_local_unicode_path
 
     out = options.stdout
 
     # check the version, to see if we have a mutable or immutable share
-    print >>out, "share filename: %s" % quote_output(options['filename'])
+    print >>out, "share filename: %s" % quote_local_unicode_path(options['filename'])
 
     f = open(options['filename'], "rb")
     prefix = f.read(32)
@@ -649,11 +649,11 @@ def find_shares(options):
 
     out = options.stdout
     sharedir = storage_index_to_dir(si_a2b(options.si_s))
-    for d in options.nodedirs:
-        d = os.path.join(d, "storage", "shares", sharedir)
-        if os.path.exists(d):
-            for shnum in listdir_unicode(d):
-                print >>out, quote_local_unicode_path(os.path.join(d, shnum), quotemarks=False)
+    for nodedir in options.nodedirs:
+        abs_sharedir = os.path.join(nodedir, u"storage", u"shares", sharedir)
+        if os.path.exists(abs_sharedir):
+            for shnum in listdir_unicode(abs_sharedir):
+                print >>out, quote_local_unicode_path(os.path.join(abs_sharedir, shnum), quotemarks=False)
 
     return 0
 
@@ -712,7 +712,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
     from allmydata.mutable.common import NeedMoreDataError
     from allmydata.immutable.layout import ReadBucketProxy
     from allmydata.util import base32
-    from allmydata.util.encodingutil import quote_output
+    from allmydata.util.encodingutil import quote_local_unicode_path
     import struct
 
     f = open(abs_sharefile, "rb")
@@ -755,7 +755,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
             print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, quote_output(abs_sharefile))
+                   expiration, quote_local_unicode_path(abs_sharefile))
         elif share_type == "MDMF":
             from allmydata.mutable.layout import MDMFSlotReadProxy
             fake_shnum = 0
@@ -784,9 +784,9 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
             print >>out, "MDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, quote_output(abs_sharefile))
+                   expiration, quote_local_unicode_path(abs_sharefile))
         else:
-            print >>out, "UNKNOWN mutable %s" % quote_output(abs_sharefile)
+            print >>out, "UNKNOWN mutable %s" % quote_local_unicode_path(abs_sharefile)
 
     elif struct.unpack(">L", prefix[:4]) == (1,):
         # immutable
@@ -818,10 +818,10 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
 
         print >>out, "CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
                                                    ueb_hash, expiration,
-                                                   quote_output(abs_sharefile))
+                                                   quote_local_unicode_path(abs_sharefile))
 
     else:
-        print >>out, "UNKNOWN really-unknown %s" % quote_output(abs_sharefile)
+        print >>out, "UNKNOWN really-unknown %s" % quote_local_unicode_path(abs_sharefile)
 
     f.close()
 
@@ -831,18 +831,18 @@ def catalog_shares(options):
     out = options.stdout
     err = options.stderr
     now = time.time()
-    for d in options.nodedirs:
-        d = os.path.join(d, "storage", "shares")
+    for nodedir in options.nodedirs:
+        abs_sharedir = os.path.join(nodedir, u"storage", u"shares")
         try:
-            abbrevs = listdir_unicode(d)
+            abbrevs = listdir_unicode(abs_sharedir)
         except EnvironmentError:
             # ignore nodes that have storage turned off altogether
             pass
         else:
             for abbrevdir in sorted(abbrevs):
-                if abbrevdir == "incoming":
+                if abbrevdir == u"incoming":
                     continue
-                abbrevdir = os.path.join(d, abbrevdir)
+                abbrevdir = os.path.join(abs_sharedir, abbrevdir)
                 # this tool may get run against bad disks, so we can't assume
                 # that listdir_unicode will always succeed. Try to catalog as much
                 # as possible.
@@ -864,7 +864,7 @@ def _as_number(s):
         return "not int"
 
 def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
-    from allmydata.util.encodingutil import listdir_unicode, quote_output
+    from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
 
     try:
         for shnum_s in sorted(listdir_unicode(si_dir), key=_as_number):
@@ -874,10 +874,10 @@ def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
                 describe_share(abs_sharefile, si_s, shnum_s, now,
                                out)
             except:
-                print >>err, "Error processing %s" % quote_output(abs_sharefile)
+                print >>err, "Error processing %s" % quote_local_unicode_path(abs_sharefile)
                 failure.Failure().printTraceback(err)
     except:
-        print >>err, "Error processing %s" % quote_output(si_dir)
+        print >>err, "Error processing %s" % quote_local_unicode_path(si_dir)
         failure.Failure().printTraceback(err)
 
 class CorruptShareOptions(BaseOptions):
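
The debug.py refactor keeps share paths unicode from the moment they are listed. listdir_unicode comes from allmydata.util.encodingutil; the essential property it relies on in Python 2 is that os.listdir() returns unicode entries when handed a unicode path. A rough sketch of the layout find_shares walks, with made-up names (the real helper has extra platform handling):

    import os

    # nodedir/storage/shares/<2-char prefix>/<storage index>/<share number>
    nodedir = u"/home/user/.tahoe"                    # hypothetical
    abs_sharedir = os.path.join(nodedir, u"storage", u"shares",
                                u"aa", u"aaavl5u4rlept3mnm3dvcr5e4")

    for shnum in os.listdir(abs_sharedir):            # unicode in, unicode out
        sharefile = os.path.join(abs_sharedir, shnum)
        assert isinstance(sharefile, unicode)
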
diff --git a/src/allmydata/scripts/runner.py b/src/allmydata/scripts/runner.py
index 1a6de258ed6e6826fa6d3aad012df1435991d5b7..a3978dc025efe283444783f746859ad480e85aa8 100644
--- a/src/allmydata/scripts/runner.py
+++ b/src/allmydata/scripts/runner.py
@@ -4,6 +4,8 @@ from cStringIO import StringIO
 
 from twisted.python import usage
 
+from allmydata.util.assertutil import precondition
+
 from allmydata.scripts.common import get_default_nodedir
 from allmydata.scripts import debug, create_node, startstop_node, cli, keygen, stats_gatherer, admin
 from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding
@@ -88,6 +90,8 @@ def runner(argv,
            run_by_human=True,
            stdin=None, stdout=None, stderr=None,
            install_node_control=True, additional_commands=None):
+    for arg in argv:
+        precondition(isinstance(arg, str), argv)
 
     stdin  = stdin  or sys.stdin
     stdout = stdout or sys.stdout
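
precondition() raises an AssertionError that includes the offending values, which is why the loop passes the whole argv as the extra argument. A stripped-down sketch of the idiom (the real allmydata.util.assertutil.precondition formats its message more elaborately):

    def precondition(expr, *args):
        # Minimal stand-in: fail loudly and show what was passed in.
        if not expr:
            raise AssertionError("precondition failed: %r" % (args,))

    argv = ["--node-directory", "/home/user/.tahoe", "ls"]  # hypothetical argv
    for arg in argv:
        precondition(isinstance(arg, str), argv)  # argv must be native bytes
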
diff --git a/src/allmydata/storage/common.py b/src/allmydata/storage/common.py
index 865275bc13b91f15266e7da1502825a1cfe9c0b7..1c4cc9bd47a4101deaaa4115a508576bb19dee04 100644
--- a/src/allmydata/storage/common.py
+++ b/src/allmydata/storage/common.py
@@ -18,4 +18,4 @@ def si_a2b(ascii_storageindex):
 
 def storage_index_to_dir(storageindex):
     sia = si_b2a(storageindex)
-    return os.path.join(sia[:2], sia)
+    return unicode(os.path.join(sia[:2], sia))
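
For reference, a usage sketch of the layout this helper produces, using a made-up 16-byte storage index; the only behavioral change here is that the result is now unicode, so everything joined onto it downstream is promoted too:

    import os
    from allmydata.storage.common import si_b2a, storage_index_to_dir

    si = "\x00" * 16                      # made-up binary storage index
    sia = si_b2a(si)                      # base32 text of the storage index
    d = storage_index_to_dir(si)          # u"<first two chars>/<full base32>"
    assert d == unicode(os.path.join(sia[:2], sia))
    assert isinstance(d, unicode)
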
diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py
index 1de4b22fe948fe8bcf5397c37a7ce772abbebffe..7439f16f7b35d0829736868788f823f22dc608ba 100644
--- a/src/allmydata/storage/server.py
+++ b/src/allmydata/storage/server.py
@@ -6,6 +6,7 @@ from twisted.application import service
 from zope.interface import implements
 from allmydata.interfaces import RIStorageServer, IStatsProducer
 from allmydata.util import fileutil, idlib, log, time_format
+from allmydata.util.assertutil import precondition
 import allmydata # for __full_version__
 
 from allmydata.storage.common import si_b2a, si_a2b, storage_index_to_dir
@@ -47,23 +48,25 @@ class StorageServer(service.MultiService, Referenceable):
                  expiration_cutoff_date=None,
                  expiration_sharetypes=("mutable", "immutable")):
         service.MultiService.__init__(self)
-        assert isinstance(nodeid, str)
-        assert len(nodeid) == 20
+
+        precondition(isinstance(nodeid, str), nodeid)
+        precondition(len(nodeid) == 20, nodeid)
+
         self.my_nodeid = nodeid
-        self.storedir = storedir
-        sharedir = os.path.join(storedir, "shares")
+        self.storedir = unicode(storedir)
+        sharedir = os.path.join(storedir, u"shares")
         fileutil.make_dirs(sharedir)
         self.sharedir = sharedir
         # we don't actually create the corruption-advisory dir until necessary
         self.corruption_advisory_dir = os.path.join(storedir,
-                                                    "corruption-advisories")
+                                                    u"corruption-advisories")
         self.reserved_space = int(reserved_space)
         self.no_storage = discard_storage
         self.readonly_storage = readonly_storage
         self.stats_provider = stats_provider
         if self.stats_provider:
             self.stats_provider.register_producer(self)
-        self.incomingdir = os.path.join(sharedir, 'incoming')
+        self.incomingdir = os.path.join(sharedir, u"incoming")
         self._clean_incomplete()
         fileutil.make_dirs(self.incomingdir)
         self._active_writers = weakref.WeakKeyDictionary()
@@ -87,8 +90,8 @@ class StorageServer(service.MultiService, Referenceable):
                           }
         self.add_bucket_counter()
 
-        statefile = os.path.join(self.storedir, "lease_checker.state")
-        historyfile = os.path.join(self.storedir, "lease_checker.history")
+        statefile = os.path.join(self.storedir, u"lease_checker.state")
+        historyfile = os.path.join(self.storedir, u"lease_checker.history")
         klass = self.LeaseCheckerClass
         self.lease_checker = klass(self, statefile, historyfile,
                                    expiration_enabled, expiration_mode,
@@ -106,7 +109,7 @@ class StorageServer(service.MultiService, Referenceable):
         return bool(set(os.listdir(self.sharedir)) - set(["incoming"]))
 
     def add_bucket_counter(self):
-        statefile = os.path.join(self.storedir, "bucket_counter.state")
+        statefile = os.path.join(self.storedir, u"bucket_counter.state")
         self.bucket_counter = BucketCountingCrawler(self, statefile)
         self.bucket_counter.setServiceParent(self)
 
@@ -283,8 +286,8 @@ class StorageServer(service.MultiService, Referenceable):
             sf.add_or_renew_lease(lease_info)
 
         for shnum in sharenums:
-            incominghome = os.path.join(self.incomingdir, si_dir, "%d" % shnum)
-            finalhome = os.path.join(self.sharedir, si_dir, "%d" % shnum)
+            incominghome = os.path.join(self.incomingdir, si_dir, u"%d" % shnum)
+            finalhome = os.path.join(self.sharedir, si_dir, u"%d" % shnum)
             if os.path.exists(finalhome):
                 # great! we already have it. easy.
                 pass
@@ -491,7 +494,7 @@ class StorageServer(service.MultiService, Referenceable):
         (write_enabler, renew_secret, cancel_secret) = secrets
         my_nodeid = self.my_nodeid
         fileutil.make_dirs(bucketdir)
-        filename = os.path.join(bucketdir, "%d" % sharenum)
+        filename = os.path.join(bucketdir, u"%d" % sharenum)
         share = create_mutable_sharefile(filename, my_nodeid, write_enabler,
                                          self)
         return share
@@ -530,7 +533,7 @@ class StorageServer(service.MultiService, Referenceable):
         si_s = si_b2a(storage_index)
         # windows can't handle colons in the filename
         fn = os.path.join(self.corruption_advisory_dir,
-                          "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
+                          (u"%s--%s-%d" % (now, si_s, shnum)).replace(u":", u""))
         f = open(fn, "w")
         f.write("report: Share Corruption\n")
         f.write("type: %s\n" % share_type)
diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py
index 8dd9a2f957a2a91469590db66849ea5984ba45ec..ad818262c8abb86615f7d7b52935c55acb687399 100644
--- a/src/allmydata/test/no_network.py
+++ b/src/allmydata/test/no_network.py
@@ -24,6 +24,7 @@ from allmydata import uri as tahoe_uri
 from allmydata.client import Client
 from allmydata.storage.server import StorageServer, storage_index_to_dir
 from allmydata.util import fileutil, idlib, hashutil
+from allmydata.util.assertutil import precondition
 from allmydata.util.hashutil import sha1
 from allmydata.test.common_web import HTTPClientGETFactory
 from allmydata.interfaces import IStorageBroker, IServer
@@ -224,6 +225,8 @@ class NoNetworkGrid(service.MultiService):
     def __init__(self, basedir, num_clients=1, num_servers=10,
                  client_config_hooks={}):
         service.MultiService.__init__(self)
+        precondition(isinstance(basedir, unicode), basedir)
+
         self.basedir = basedir
         fileutil.make_dirs(basedir)
 
@@ -266,8 +269,8 @@ class NoNetworkGrid(service.MultiService):
 
     def make_server(self, i, readonly=False):
         serverid = hashutil.tagged_hash("serverid", str(i))[:20]
-        serverdir = os.path.join(self.basedir, "servers",
-                                 idlib.shortnodeid_b2a(serverid), "storage")
+        serverdir = os.path.join(self.basedir, u"servers",
+                                 unicode(idlib.shortnodeid_b2a(serverid)), u"storage")
         fileutil.make_dirs(serverdir)
         ss = StorageServer(serverdir, serverid, stats_provider=SimpleStats(),
                            readonly_storage=readonly)
@@ -345,7 +348,7 @@ class GridTestMixin:
     def set_up_grid(self, num_clients=1, num_servers=10,
                     client_config_hooks={}):
         # self.basedir must be set
-        self.g = NoNetworkGrid(self.basedir,
+        self.g = NoNetworkGrid(unicode(self.basedir),
                                num_clients=num_clients,
                                num_servers=num_servers,
                                client_config_hooks=client_config_hooks)
diff --git a/src/allmydata/test/test_cli.py b/src/allmydata/test/test_cli.py
index 2eb6dd9e34603d39ea0ae1119da62787ab980250..68c6f12fb197fc36c69087994f998c748bcb58a7 100644
--- a/src/allmydata/test/test_cli.py
+++ b/src/allmydata/test/test_cli.py
@@ -6,6 +6,8 @@ import urllib, sys
 
 from mock import Mock, call
 
+from allmydata.util.assertutil import precondition
+
 import allmydata
 from allmydata.util import fileutil, hashutil, base32, keyutil
 from allmydata import uri
@@ -33,9 +35,8 @@ from allmydata.test.no_network import GridTestMixin
 from twisted.internet import threads # CLI tests use deferToThread
 from twisted.python import usage
 
-from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
-    get_io_encoding, get_filesystem_encoding
+    get_io_encoding, get_filesystem_encoding, unicode_to_argv
 
 timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
 
@@ -52,6 +53,11 @@ class CLITestMixin(ReallyEqualMixin):
             "--node-directory", unicode_to_argv(self.get_clientdir()),
             ]
         argv = nodeargs + [verb] + list(args)
+
+        # runner.runner will also check this, but in another thread; this gives a better traceback
+        for arg in argv:
+            precondition(isinstance(arg, str), argv)
+
         stdin = kwargs.get("stdin", "")
         stdout, stderr = StringIO(), StringIO()
         d = threads.deferToThread(runner.runner, argv, run_by_human=False,
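
unicode_to_argv (from allmydata.util.encodingutil) is the inverse boundary conversion: the tests hold unicode paths, but runner.runner insists on native byte strings, so paths are encoded at the point they enter argv. A rough sketch of the idea (the real helper has dedicated Windows handling and uses Tahoe's io-encoding detection):

    import sys

    def unicode_to_argv_sketch(u):
        # Encode a unicode value the way it would appear in sys.argv.
        assert isinstance(u, unicode)
        return u.encode(sys.stdout.encoding or "utf-8")

    nodeargs = ["--node-directory",
                unicode_to_argv_sketch(u"/home/user/.tahoe")]  # hypothetical
    for arg in nodeargs:
        assert isinstance(arg, str)   # the property the preconditions check
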
diff --git a/src/allmydata/test/test_cli_backup.py b/src/allmydata/test/test_cli_backup.py
index 6d1dbe92a78cffddcf269a5c54a8d65f11b785fe..41556fd9bc723764ae68983bccf0a4ecd1978c58 100644
--- a/src/allmydata/test/test_cli_backup.py
+++ b/src/allmydata/test/test_cli_backup.py
@@ -136,7 +136,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         # timestamp to force a check on all files
         def _reset_last_checked(res):
             dbfile = os.path.join(self.get_clientdir(),
-                                  "private", "backupdb.sqlite")
+                                  u"private", u"backupdb.sqlite")
             self.failUnless(os.path.exists(dbfile), dbfile)
             bdb = backupdb.get_backupdb(dbfile)
             bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
diff --git a/src/allmydata/test/test_cli_create_alias.py b/src/allmydata/test/test_cli_create_alias.py
index ae3cd5bdbc000737cfe8da183521f482959b1d1d..b3d735658b448d6dbc63b58b67ece13ec2664d30 100644
--- a/src/allmydata/test/test_cli_create_alias.py
+++ b/src/allmydata/test/test_cli_create_alias.py
@@ -5,7 +5,7 @@ from allmydata.util import fileutil
 from allmydata.scripts.common import get_aliases
 from allmydata.scripts import cli, runner
 from allmydata.test.no_network import GridTestMixin
-from allmydata.util.encodingutil import quote_output, get_io_encoding
+from allmydata.util.encodingutil import quote_output, get_io_encoding, unicode_to_argv
 from .test_cli import CLITestMixin
 
 timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
@@ -14,7 +14,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
 
     def _test_webopen(self, args, expected_url):
         o = runner.Options()
-        o.parseOptions(["--node-directory", self.get_clientdir(), "webopen"]
+        o.parseOptions(["--node-directory", unicode_to_argv(self.get_clientdir()), "webopen"]
                        + list(args))
         urls = []
         rc = cli.webopen(o, urls.append)
@@ -25,7 +25,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
     def test_create(self):
         self.basedir = "cli/CreateAlias/create"
         self.set_up_grid()
-        aliasfile = os.path.join(self.get_clientdir(), "private", "aliases")
+        aliasfile = os.path.join(self.get_clientdir(), u"private", u"aliases")
 
         d = self.do_cli("create-alias", "tahoe")
         def _done((rc,stdout,stderr)):
@@ -39,7 +39,7 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         def _stash_urls(res):
             aliases = get_aliases(self.get_clientdir())
-            node_url_file = os.path.join(self.get_clientdir(), "node.url")
+            node_url_file = os.path.join(self.get_clientdir(), u"node.url")
             nodeurl = fileutil.read(node_url_file).strip()
             self.welcome_url = nodeurl
             uribase = nodeurl + "uri/"
diff --git a/src/allmydata/test/test_client.py b/src/allmydata/test/test_client.py
index d6cd7d0d45e35a0e77e88693535182875440ecd4..3a3aaa2f3139c77ae2d03d61697fe761bb5a850d 100644
--- a/src/allmydata/test/test_client.py
+++ b/src/allmydata/test/test_client.py
@@ -1,4 +1,4 @@
-import os, sys
+import os
 from twisted.trial import unittest
 from twisted.application import service
 
@@ -70,7 +70,7 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
         fileutil.write(os.path.join(basedir, "debug_discard_storage"), "")
 
         e = self.failUnlessRaises(OldConfigError, client.Client, basedir)
-        abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir)).encode(sys.getfilesystemencoding())
+        abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir))
         self.failUnlessIn(os.path.join(abs_basedir, "introducer.furl"), e.args[0])
         self.failUnlessIn(os.path.join(abs_basedir, "no_storage"), e.args[0])
         self.failUnlessIn(os.path.join(abs_basedir, "readonly_storage"), e.args[0])
diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py
index fd9db7ec15487595722a8ee96d5fa5c70c35a47d..9b6bf7181478d28c2bfec5f8c81df0bc9fbc2860 100644
--- a/src/allmydata/test/test_deepcheck.py
+++ b/src/allmydata/test/test_deepcheck.py
@@ -4,11 +4,15 @@ from cStringIO import StringIO
 from twisted.trial import unittest
 from twisted.internet import defer
 from twisted.internet import threads # CLI tests use deferToThread
+
+from allmydata.util.assertutil import precondition
+
 from allmydata.immutable import upload
 from allmydata.mutable.common import UnrecoverableFileError
 from allmydata.mutable.publish import MutableData
 from allmydata.util import idlib
 from allmydata.util import base32
+from allmydata.util.encodingutil import unicode_to_argv
 from allmydata.scripts import runner
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
      IDeepCheckResults, IDeepCheckAndRepairResults
@@ -25,9 +29,10 @@ timeout = 2400 # One of these took 1046.091s on Zandr's ARM box.
 
 class MutableChecker(GridTestMixin, unittest.TestCase, ErrorMixin):
     def _run_cli(self, argv):
+        precondition(argv[0] == "debug", argv)
+
         stdout, stderr = StringIO(), StringIO()
         # this can only do synchronous operations
-        assert argv[0] == "debug"
         runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
         return stdout.getvalue()
 
@@ -728,6 +733,10 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         return d
 
     def _run_cli(self, argv, stdin=""):
+        # runner.runner will also check this, but in another thread; this gives a better traceback
+        for arg in argv:
+            precondition(isinstance(arg, str), argv)
+
         #print "CLI:", argv
         stdout, stderr = StringIO(), StringIO()
         d = threads.deferToThread(runner.runner, argv, run_by_human=False,
@@ -758,7 +767,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_manifest_stream1(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            self.root_uri])
         def _check((out,err)):
@@ -786,7 +795,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_manifest_stream2(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--raw",
                            self.root_uri])
@@ -799,7 +808,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_manifest_stream3(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--storage-index",
                            self.root_uri])
@@ -811,7 +820,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_manifest_stream4(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--verify-cap",
                            self.root_uri])
@@ -827,7 +836,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_manifest_stream5(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "manifest",
                            "--repair-cap",
                            self.root_uri])
@@ -843,7 +852,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_stats1(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "stats",
                            self.root_uri])
         def _check3((out,err)):
@@ -863,7 +872,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_cli_stats2(self):
         basedir = self.get_clientdir(0)
-        d = self._run_cli(["--node-directory", basedir,
+        d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
                            "stats",
                            "--raw",
                            self.root_uri])
@@ -984,9 +993,10 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
         return d
 
     def _run_cli(self, argv):
+        precondition(argv[0] == "debug", argv)
+
         stdout, stderr = StringIO(), StringIO()
         # this can only do synchronous operations
-        assert argv[0] == "debug"
         runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
         return stdout.getvalue()
 
@@ -996,7 +1006,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
     def _corrupt_some_shares(self, node):
         for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
             if shnum in (0,1):
-                self._run_cli(["debug", "corrupt-share", sharefile])
+                self._run_cli(["debug", "corrupt-share", unicode_to_argv(sharefile)])
 
     def _delete_most_shares(self, node):
         self.delete_shares_numbered(node.get_uri(), range(1,10))
diff --git a/src/allmydata/test/test_mutable.py b/src/allmydata/test/test_mutable.py
index 890d294e84d314d1e822a9a48c72362a601ddf0e..5086ae8c81cd1b99982794e5127b41bcadcd52c8 100644
--- a/src/allmydata/test/test_mutable.py
+++ b/src/allmydata/test/test_mutable.py
@@ -3113,7 +3113,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
             fso = debug.FindSharesOptions()
             storage_index = base32.b2a(n.get_storage_index())
             fso.si_s = storage_index
-            fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(unicode(storedir)))
+            fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(storedir))
                             for (i,ss,storedir)
                             in self.iterate_servers()]
             fso.stdout = StringIO()
diff --git a/src/allmydata/test/test_no_network.py b/src/allmydata/test/test_no_network.py
index 345662f7da9f29bfac7a1935508cc6b0112d510b..476e31dec48f6ab789ab9584053b1931da849d5e 100644
--- a/src/allmydata/test/test_no_network.py
+++ b/src/allmydata/test/test_no_network.py
@@ -7,6 +7,7 @@ from allmydata.test.no_network import NoNetworkGrid
 from allmydata.immutable.upload import Data
 from allmydata.util.consumer import download_to_data
 
+
 class Harness(unittest.TestCase):
     def setUp(self):
         self.s = service.MultiService()
@@ -16,13 +17,13 @@ class Harness(unittest.TestCase):
         return self.s.stopService()
 
     def test_create(self):
-        basedir = "no_network/Harness/create"
+        basedir = u"no_network/Harness/create"
         g = NoNetworkGrid(basedir)
         g.startService()
         return g.stopService()
 
     def test_upload(self):
-        basedir = "no_network/Harness/upload"
+        basedir = u"no_network/Harness/upload"
         g = NoNetworkGrid(basedir)
         g.setServiceParent(self.s)
 
diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index 9692466cbb261a27a230397dbd5b2c7b179acbf3..2d0cdbd350081ffd046e3a04cc9e2839e683fe29 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -1414,7 +1414,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
 
 
     def workdir(self, name):
-        basedir = os.path.join("storage", "MutableServer", name)
+        basedir = os.path.join("storage", "MDMFProxies", name)
         return basedir
 
 
@@ -2793,7 +2793,7 @@ class Stats(unittest.TestCase):
         return self.sparent.stopService()
 
     def workdir(self, name):
-        basedir = os.path.join("storage", "Server", name)
+        basedir = os.path.join("storage", "Stats", name)
         return basedir
 
     def create(self, name):
diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py
index 3b23b2f64ee02fa20c0e35f5b0b791daac6f9a3f..f323db063911f056a58fbfb421509ff0c7ce9f1d 100644
--- a/src/allmydata/test/test_system.py
+++ b/src/allmydata/test/test_system.py
@@ -16,7 +16,8 @@ from allmydata.immutable.filenode import ImmutableFileNode
 from allmydata.util import idlib, mathutil
 from allmydata.util import log, base32
 from allmydata.util.verlib import NormalizedVersion
-from allmydata.util.encodingutil import quote_output, unicode_to_argv, get_filesystem_encoding
+from allmydata.util.encodingutil import quote_local_unicode_path, \
+     unicode_to_argv, get_filesystem_encoding
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.consumer import MemoryConsumer, download_to_data
 from allmydata.scripts import runner
@@ -730,6 +731,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(self.log, "starting publish")
         d.addCallback(self._do_publish1)
         d.addCallback(self._test_runner)
+        return d
         d.addCallback(self._do_publish2)
         # at this point, we have the following filesystem (where "R" denotes
         # self._root_directory_uri):
@@ -1314,14 +1316,14 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 
         # find a share
         for (dirpath, dirnames, filenames) in os.walk(unicode(self.basedir)):
-            if "storage" not in dirpath:
+            if u"storage" not in dirpath:
                 continue
             if not filenames:
                 continue
             pieces = dirpath.split(os.sep)
             if (len(pieces) >= 4
-                and pieces[-4] == "storage"
-                and pieces[-3] == "shares"):
+                and pieces[-4] == u"storage"
+                and pieces[-3] == u"shares"):
                 # we're sitting in .../storage/shares/$START/$SINDEX , and there
                 # are sharefiles here
                 filename = os.path.join(dirpath, filenames[0])
@@ -1343,7 +1345,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 
         # we only upload a single file, so we can assert some things about
         # its size and shares.
-        self.failUnlessIn("share filename: %s" % quote_output(abspath_expanduser_unicode(filename)), output)
+        self.failUnlessIn("share filename: %s" % quote_local_unicode_path(abspath_expanduser_unicode(filename)), output)
         self.failUnlessIn("size: %d\n" % len(self.data), output)
         self.failUnlessIn("num_segments: 1\n", output)
         # segment_size is always a multiple of needed_shares
@@ -1377,11 +1379,12 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         out,err = StringIO(), StringIO()
         nodedirs = [self.getdir("client%d" % i) for i in range(self.numclients)]
         cmd = ["debug", "catalog-shares"] + nodedirs
+        #import pdb; pdb.set_trace()
         rc = runner.runner(cmd, stdout=out, stderr=err)
         self.failUnlessEqual(rc, 0)
         out.seek(0)
         descriptions = [sfn.strip() for sfn in out.readlines()]
-        self.failUnlessEqual(len(descriptions), 30)
+        self.failUnlessEqual(len(descriptions), 30, descriptions)
         matching = [line
                     for line in descriptions
                     if line.startswith("CHK %s " % storage_index_s)]
diff --git a/src/allmydata/test/test_util.py b/src/allmydata/test/test_util.py
index a4dcbd8cf63d7b09f3ae49ba24fcec77e394b600..8ae959de2b5f2f7d6c4c74032c90fe1fa48836b9 100644
--- a/src/allmydata/test/test_util.py
+++ b/src/allmydata/test/test_util.py
@@ -175,6 +175,7 @@ class Math(unittest.TestCase):
         f = mathutil.round_sigfigs
         self.failUnlessEqual(f(22.0/3, 4), 7.3330000000000002)
 
+
 class Statistics(unittest.TestCase):
     def should_assert(self, msg, func, *args, **kwargs):
         try:
@@ -372,6 +373,7 @@ class Asserts(unittest.TestCase):
         m = self.should_assert(f, False, othermsg="message2")
         self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)
 
+
 class FileUtil(ReallyEqualMixin, unittest.TestCase):
     def mkdir(self, basedir, path, mode=0777):
         fn = os.path.join(basedir, path)
@@ -554,6 +556,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
         disk = fileutil.get_disk_stats(u".", 2**128)
         self.failUnlessEqual(disk['avail'], 0)
 
+
 class PollMixinTests(unittest.TestCase):
     def setUp(self):
         self.pm = pollmixin.PollMixin()
@@ -581,6 +584,7 @@ class PollMixinTests(unittest.TestCase):
         d.addCallbacks(_suc, _err)
         return d
 
+
 class DeferredUtilTests(unittest.TestCase):
     def test_gather_results(self):
         d1 = defer.Deferred()
@@ -621,8 +625,8 @@ class DeferredUtilTests(unittest.TestCase):
         self.failUnless(isinstance(f, Failure))
         self.failUnless(f.check(ValueError))
 
-class HashUtilTests(unittest.TestCase):
 
+class HashUtilTests(unittest.TestCase):
     def test_random_key(self):
         k = hashutil.random_key()
         self.failUnlessEqual(len(k), hashutil.KEYLEN)
@@ -815,6 +819,7 @@ class Abbreviate(unittest.TestCase):
         e = self.failUnlessRaises(ValueError, p, "fhtagn")
         self.failUnlessIn("fhtagn", str(e))
 
+
 class Limiter(unittest.TestCase):
     timeout = 480 # This takes longer than 240 seconds on Francois's arm box.
 
@@ -890,6 +895,7 @@ class Limiter(unittest.TestCase):
         d.addCallback(_all_done)
         return d
 
+
 class TimeFormat(unittest.TestCase):
     def test_epoch(self):
         return self._help_test_epoch()
@@ -978,6 +984,7 @@ class TimeFormat(unittest.TestCase):
     def test_parse_date(self):
         self.failUnlessEqual(time_format.parse_date("2010-02-21"), 1266710400)
 
+
 class CacheDir(unittest.TestCase):
     def test_basic(self):
         basedir = "test_util/CacheDir/test_basic"
@@ -1042,6 +1049,7 @@ class CacheDir(unittest.TestCase):
         _failUnlessExists("c")
         del b2
 
+
 ctr = [0]
 class EqButNotIs:
     def __init__(self, x):
@@ -1065,6 +1073,7 @@ class EqButNotIs:
     def __eq__(self, other):
         return self.x == other
 
+
 class DictUtil(unittest.TestCase):
     def _help_test_empty_dict(self, klass):
         d1 = klass()
@@ -1440,6 +1449,7 @@ class DictUtil(unittest.TestCase):
         self.failUnlessEqual(d["one"], 1)
         self.failUnlessEqual(d.get_aux("one"), None)
 
+
 class Pipeline(unittest.TestCase):
     def pause(self, *args, **kwargs):
         d = defer.Deferred()
@@ -1614,9 +1624,11 @@ class Pipeline(unittest.TestCase):
 
         del d1,d2,d3,d4
 
+
 class SampleError(Exception):
     pass
 
+
 class Log(unittest.TestCase):
     def test_err(self):
         if not hasattr(self, "flushLoggedErrors"):
@@ -1716,6 +1728,7 @@ class SimpleSpans:
                 return False
         return True
 
+
 class ByteSpans(unittest.TestCase):
     def test_basic(self):
         s = Spans()
@@ -1950,6 +1963,7 @@ class ByteSpans(unittest.TestCase):
                     out.write(" ")
         out.write("\n")
 
+
 def extend(s, start, length, fill):
     if len(s) >= start+length:
         return s