from allmydata.util import fileutil, iputil, observer
from allmydata.util.assertutil import precondition, _assert
from allmydata.util.fileutil import abspath_expanduser_unicode
-from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
+from allmydata.util.encodingutil import get_filesystem_encoding, quote_output, \
+ quote_local_unicode_path
# Add our application versions to the data that Foolscap's LogPublisher
# reports.
return ("Found pre-Tahoe-LAFS-v1.3 configuration file(s):\n"
"%s\n"
"See docs/historical/configuration.rst."
- % "\n".join([quote_output(fname) for fname in self.args[0]]))
+ % "\n".join([quote_local_unicode_path(fname) for fname in self.args[0]]))
class OldConfigOptionError(Exception):
pass
self.basedir = abspath_expanduser_unicode(unicode(basedir))
self._portnumfile = os.path.join(self.basedir, self.PORTNUMFILE)
self._tub_ready_observerlist = observer.OneShotObserverList()
- fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
- open(os.path.join(self.basedir, "private", "README"), "w").write(PRIV_README)
+ fileutil.make_dirs(os.path.join(self.basedir, u"private"), 0700)
+ open(os.path.join(self.basedir, u"private", u"README"), "w").write(PRIV_README)
# creates self.config
self.read_config()
self.error_about_old_config_files()
self.config = ConfigParser.SafeConfigParser()
- tahoe_cfg = os.path.join(self.basedir, "tahoe.cfg")
+ tahoe_cfg = os.path.join(self.basedir, u"tahoe.cfg")
try:
f = open(tahoe_cfg, "rb")
try:
'no_storage', 'readonly_storage', 'sizelimit',
'debug_discard_storage', 'run_helper']:
if name not in self.GENERATED_FILES:
- fullfname = os.path.join(self.basedir, name)
+ fullfname = os.path.join(self.basedir, unicode(name))
if os.path.exists(fullfname):
oldfnames.add(fullfname)
if oldfnames:
raise e
def create_tub(self):
- certfile = os.path.join(self.basedir, "private", self.CERTFILE)
+ certfile = os.path.join(self.basedir, u"private", self.CERTFILE)
self.tub = Tub(certFile=certfile)
self.tub.setOption("logLocalFailures", True)
self.tub.setOption("logRemoteFailures", True)
config file that resides within the subdirectory named 'private'), and
return it.
"""
- privname = os.path.join(self.basedir, "private", name)
+ privname = os.path.join(self.basedir, u"private", name)
open(privname, "w").write(value)
def get_private_config(self, name, default=_None):
and return it. Return a default, or raise an error if one was not
given.
"""
- privname = os.path.join(self.basedir, "private", name)
+ privname = os.path.join(self.basedir, u"private", name)
try:
return fileutil.read(privname)
except EnvironmentError:
If 'default' is a string, use it as a default value. If not, treat it
as a zero-argument callable that is expected to return a string.
"""
- privname = os.path.join(self.basedir, "private", name)
+ privname = os.path.join(self.basedir, u"private", name)
try:
value = fileutil.read(privname)
except EnvironmentError:
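(Sketch of the default handling the docstring above describes, assuming fileutil.write(path, data) persists the generated value; not the verbatim method body:)

    privname = os.path.join(self.basedir, u"private", name)
    try:
        value = fileutil.read(privname)
    except EnvironmentError:
        if isinstance(default, basestring):
            value = default
        else:
            value = default()  # zero-argument callable returning a string
        fileutil.write(privname, value)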
ob.formatTime = newmeth
# TODO: twisted >2.5.0 offers maxRotatedFiles=50
- lgfurl_file = os.path.join(self.basedir, "private", "logport.furl").encode(get_filesystem_encoding())
- self.tub.setOption("logport-furlfile", lgfurl_file)
+ logport_furl_file = os.path.join(self.basedir, u"private", u"logport.furl")
+ self.tub.setOption("logport-furlfile", logport_furl_file.encode(get_filesystem_encoding()))
lgfurl = self.get_config("node", "log_gatherer.furl", "")
if lgfurl:
# this is in addition to the contents of log-gatherer-furlfile
self.tub.setOption("log-gatherer-furl", lgfurl)
- self.tub.setOption("log-gatherer-furlfile",
- os.path.join(self.basedir, "log_gatherer.furl"))
+ log_gatherer_furl_file = os.path.join(self.basedir, u"log_gatherer.furl")
+ self.tub.setOption("log-gatherer-furlfile",log_gatherer_furl_file.encode(get_filesystem_encoding()))
self.tub.setOption("bridge-twisted-logs", True)
- incident_dir = os.path.join(self.basedir, "logs", "incidents")
+ incident_dir = os.path.join(self.basedir, u"logs", u"incidents")
foolscap.logging.log.setLogDir(incident_dir.encode(get_filesystem_encoding()))
def log(self, *args, **kwargs):
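(The convention this hunk establishes recurs throughout the patch: paths stay unicode inside the node and are encoded to the filesystem encoding only at the foolscap boundary. Condensed sketch:)

    # unicode internally, bytes at the foolscap API
    furl_file = os.path.join(self.basedir, u"private", u"logport.furl")
    self.tub.setOption("logport-furlfile",
                       furl_file.encode(get_filesystem_encoding()))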
import os.path, re, fnmatch
from twisted.python import usage
from allmydata.scripts.common import get_aliases, get_default_nodedir, \
DEFAULT_ALIAS, BaseOptions
-from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_local_unicode_path
+from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_output, \
+ quote_local_unicode_path
NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")
pass
else:
print >>options.stderr, "%s retrieved and written to %s" % \
- (options.from_file, options.to_file)
+ (quote_output(options.from_file), quote_local_unicode_path(options.to_file))
return rc
def put(options):
def dump_share(options):
from allmydata.storage.mutable import MutableShareFile
- from allmydata.util.encodingutil import quote_output
+ from allmydata.util.encodingutil import quote_local_unicode_path
out = options.stdout
# check the version, to see if we have a mutable or immutable share
- print >>out, "share filename: %s" % quote_output(options['filename'])
+ print >>out, "share filename: %s" % quote_local_unicode_path(options['filename'])
f = open(options['filename'], "rb")
prefix = f.read(32)
out = options.stdout
sharedir = storage_index_to_dir(si_a2b(options.si_s))
- for d in options.nodedirs:
- d = os.path.join(d, "storage", "shares", sharedir)
- if os.path.exists(d):
- for shnum in listdir_unicode(d):
- print >>out, quote_local_unicode_path(os.path.join(d, shnum), quotemarks=False)
+ for nodedir in options.nodedirs:
+ abs_sharedir = os.path.join(nodedir, u"storage", u"shares", sharedir)
+ if os.path.exists(abs_sharedir):
+ for shnum in listdir_unicode(abs_sharedir):
+ print >>out, quote_local_unicode_path(os.path.join(abs_sharedir, shnum), quotemarks=False)
return 0
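(With quotemarks=False the share paths print bare, one per line; hypothetical output for a node directory /home/user/.tahoe:)

    /home/user/.tahoe/storage/shares/aa/aaaaaaaaaaaaaaaaaaaaaaaaaa/0
    /home/user/.tahoe/storage/shares/aa/aaaaaaaaaaaaaaaaaaaaaaaaaa/5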
from allmydata.mutable.common import NeedMoreDataError
from allmydata.immutable.layout import ReadBucketProxy
from allmydata.util import base32
- from allmydata.util.encodingutil import quote_output
+ from allmydata.util.encodingutil import quote_local_unicode_path
import struct
f = open(abs_sharefile, "rb")
print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
(si_s, k, N, datalen,
seqnum, base32.b2a(root_hash),
- expiration, quote_output(abs_sharefile))
+ expiration, quote_local_unicode_path(abs_sharefile))
elif share_type == "MDMF":
from allmydata.mutable.layout import MDMFSlotReadProxy
fake_shnum = 0
print >>out, "MDMF %s %d/%d %d #%d:%s %d %s" % \
(si_s, k, N, datalen,
seqnum, base32.b2a(root_hash),
- expiration, quote_output(abs_sharefile))
+ expiration, quote_local_unicode_path(abs_sharefile))
else:
- print >>out, "UNKNOWN mutable %s" % quote_output(abs_sharefile)
+ print >>out, "UNKNOWN mutable %s" % quote_local_unicode_path(abs_sharefile)
elif struct.unpack(">L", prefix[:4]) == (1,):
# immutable
print >>out, "CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
ueb_hash, expiration,
- quote_output(abs_sharefile))
+ quote_local_unicode_path(abs_sharefile))
else:
- print >>out, "UNKNOWN really-unknown %s" % quote_output(abs_sharefile)
+ print >>out, "UNKNOWN really-unknown %s" % quote_local_unicode_path(abs_sharefile)
f.close()
out = options.stdout
err = options.stderr
now = time.time()
- for d in options.nodedirs:
- d = os.path.join(d, "storage", "shares")
+ for nodedir in options.nodedirs:
+ abs_sharedir = os.path.join(nodedir, u"storage", u"shares")
try:
- abbrevs = listdir_unicode(d)
+ abbrevs = listdir_unicode(abs_sharedir)
except EnvironmentError:
# ignore nodes that have storage turned off altogether
pass
else:
for abbrevdir in sorted(abbrevs):
- if abbrevdir == "incoming":
+ if abbrevdir == u"incoming":
continue
- abbrevdir = os.path.join(d, abbrevdir)
+ abbrevdir = os.path.join(abs_sharedir, abbrevdir)
# this tool may get run against bad disks, so we can't assume
# that listdir_unicode will always succeed. Try to catalog as much
# as possible.
return "not int"
def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
- from allmydata.util.encodingutil import listdir_unicode, quote_output
+ from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
try:
for shnum_s in sorted(listdir_unicode(si_dir), key=_as_number):
describe_share(abs_sharefile, si_s, shnum_s, now,
out)
except:
- print >>err, "Error processing %s" % quote_output(abs_sharefile)
+ print >>err, "Error processing %s" % quote_local_unicode_path(abs_sharefile)
failure.Failure().printTraceback(err)
except:
- print >>err, "Error processing %s" % quote_output(si_dir)
+ print >>err, "Error processing %s" % quote_local_unicode_path(si_dir)
failure.Failure().printTraceback(err)
class CorruptShareOptions(BaseOptions):
from twisted.python import usage
+from allmydata.util.assertutil import precondition
+
from allmydata.scripts.common import get_default_nodedir
from allmydata.scripts import debug, create_node, startstop_node, cli, keygen, stats_gatherer, admin
from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding
run_by_human=True,
stdin=None, stdout=None, stderr=None,
install_node_control=True, additional_commands=None):
+ for arg in argv:
+ precondition(isinstance(arg, str), argv)
stdin = stdin or sys.stdin
stdout = stdout or sys.stdout
def storage_index_to_dir(storageindex):
sia = si_b2a(storageindex)
- return os.path.join(sia[:2], sia)
+ return unicode(os.path.join(sia[:2], sia))
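(Worked example, assuming si_b2a() is Tahoe's lowercase base32 encoder, under which a 16-byte all-zero storage index encodes to 26 'a' characters:)

    si = "\x00" * 16
    sia = si_b2a(si)                     # "a" * 26
    assert storage_index_to_dir(si) == os.path.join("aa", "a" * 26)
    assert isinstance(storage_index_to_dir(si), unicode)  # new with this patch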
from zope.interface import implements
from allmydata.interfaces import RIStorageServer, IStatsProducer
from allmydata.util import fileutil, idlib, log, time_format
+from allmydata.util.assertutil import precondition
import allmydata # for __full_version__
from allmydata.storage.common import si_b2a, si_a2b, storage_index_to_dir
expiration_cutoff_date=None,
expiration_sharetypes=("mutable", "immutable")):
service.MultiService.__init__(self)
- assert isinstance(nodeid, str)
- assert len(nodeid) == 20
+
+ precondition(isinstance(nodeid, str), nodeid)
+ precondition(len(nodeid) == 20, nodeid)
+
self.my_nodeid = nodeid
- self.storedir = storedir
- sharedir = os.path.join(storedir, "shares")
+ self.storedir = storedir = unicode(storedir)
+ sharedir = os.path.join(storedir, u"shares")
fileutil.make_dirs(sharedir)
self.sharedir = sharedir
# we don't actually create the corruption-advisory dir until necessary
self.corruption_advisory_dir = os.path.join(storedir,
- "corruption-advisories")
+ u"corruption-advisories")
self.reserved_space = int(reserved_space)
self.no_storage = discard_storage
self.readonly_storage = readonly_storage
self.stats_provider = stats_provider
if self.stats_provider:
self.stats_provider.register_producer(self)
- self.incomingdir = os.path.join(sharedir, 'incoming')
+ self.incomingdir = os.path.join(sharedir, u"incoming")
self._clean_incomplete()
fileutil.make_dirs(self.incomingdir)
self._active_writers = weakref.WeakKeyDictionary()
}
self.add_bucket_counter()
- statefile = os.path.join(self.storedir, "lease_checker.state")
- historyfile = os.path.join(self.storedir, "lease_checker.history")
+ statefile = os.path.join(self.storedir, u"lease_checker.state")
+ historyfile = os.path.join(self.storedir, u"lease_checker.history")
klass = self.LeaseCheckerClass
self.lease_checker = klass(self, statefile, historyfile,
expiration_enabled, expiration_mode,
return bool(set(os.listdir(self.sharedir)) - set(["incoming"]))
def add_bucket_counter(self):
- statefile = os.path.join(self.storedir, "bucket_counter.state")
+ statefile = os.path.join(self.storedir, u"bucket_counter.state")
self.bucket_counter = BucketCountingCrawler(self, statefile)
self.bucket_counter.setServiceParent(self)
sf.add_or_renew_lease(lease_info)
for shnum in sharenums:
- incominghome = os.path.join(self.incomingdir, si_dir, "%d" % shnum)
- finalhome = os.path.join(self.sharedir, si_dir, "%d" % shnum)
+ incominghome = os.path.join(self.incomingdir, si_dir, u"%d" % shnum)
+ finalhome = os.path.join(self.sharedir, si_dir, u"%d" % shnum)
if os.path.exists(finalhome):
# great! we already have it. easy.
pass
(write_enabler, renew_secret, cancel_secret) = secrets
my_nodeid = self.my_nodeid
fileutil.make_dirs(bucketdir)
- filename = os.path.join(bucketdir, "%d" % sharenum)
+ filename = os.path.join(bucketdir, u"%d" % sharenum)
share = create_mutable_sharefile(filename, my_nodeid, write_enabler,
self)
return share
si_s = si_b2a(storage_index)
# windows can't handle colons in the filename
fn = os.path.join(self.corruption_advisory_dir,
- "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
+ (u"%s--%s-%d" % (now, si_s, shnum)).replace(u":", u""))
f = open(fn, "w")
f.write("report: Share Corruption\n")
f.write("type: %s\n" % share_type)
from allmydata.client import Client
from allmydata.storage.server import StorageServer, storage_index_to_dir
from allmydata.util import fileutil, idlib, hashutil
+from allmydata.util.assertutil import precondition
from allmydata.util.hashutil import sha1
from allmydata.test.common_web import HTTPClientGETFactory
from allmydata.interfaces import IStorageBroker, IServer
def __init__(self, basedir, num_clients=1, num_servers=10,
client_config_hooks={}):
service.MultiService.__init__(self)
+ precondition(isinstance(basedir, unicode), basedir)
+
self.basedir = basedir
fileutil.make_dirs(basedir)
def make_server(self, i, readonly=False):
serverid = hashutil.tagged_hash("serverid", str(i))[:20]
- serverdir = os.path.join(self.basedir, "servers",
- idlib.shortnodeid_b2a(serverid), "storage")
+ serverdir = os.path.join(self.basedir, u"servers",
+ unicode(idlib.shortnodeid_b2a(serverid)), u"storage")
fileutil.make_dirs(serverdir)
ss = StorageServer(serverdir, serverid, stats_provider=SimpleStats(),
readonly_storage=readonly)
def set_up_grid(self, num_clients=1, num_servers=10,
client_config_hooks={}):
# self.basedir must be set
- self.g = NoNetworkGrid(self.basedir,
+ self.g = NoNetworkGrid(unicode(self.basedir),
num_clients=num_clients,
num_servers=num_servers,
client_config_hooks=client_config_hooks)
from mock import Mock, call
+from allmydata.util.assertutil import precondition
+
import allmydata
from allmydata.util import fileutil, hashutil, base32, keyutil
from allmydata import uri
from twisted.internet import threads # CLI tests use deferToThread
from twisted.python import usage
-from allmydata.util.assertutil import precondition
from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
- get_io_encoding, get_filesystem_encoding
+ get_io_encoding, get_filesystem_encoding, unicode_to_argv
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
"--node-directory", unicode_to_argv(self.get_clientdir()),
]
argv = nodeargs + [verb] + list(args)
+
+ # runner.runner will also check this, but in another thread; this gives a better traceback
+ for arg in argv:
+ precondition(isinstance(arg, str), argv)
+
stdin = kwargs.get("stdin", "")
stdout, stderr = StringIO(), StringIO()
d = threads.deferToThread(runner.runner, argv, run_by_human=False,
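(Illustration of what the loop above catches: a unicode element in argv would otherwise surface as a failure inside the thread running runner.runner, with a far less useful traceback. Hypothetical misuse and fix:)

    argv = ["--node-directory", self.get_clientdir(), "ls"]                   # unicode dir: fails fast here
    argv = ["--node-directory", unicode_to_argv(self.get_clientdir()), "ls"]  # bytes: passes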
# timestamp to force a check on all files
def _reset_last_checked(res):
dbfile = os.path.join(self.get_clientdir(),
- "private", "backupdb.sqlite")
+ u"private", u"backupdb.sqlite")
self.failUnless(os.path.exists(dbfile), dbfile)
bdb = backupdb.get_backupdb(dbfile)
bdb.cursor.execute("UPDATE last_upload SET last_checked=0")
from allmydata.scripts.common import get_aliases
from allmydata.scripts import cli, runner
from allmydata.test.no_network import GridTestMixin
-from allmydata.util.encodingutil import quote_output, get_io_encoding
+from allmydata.util.encodingutil import quote_output, get_io_encoding, unicode_to_argv
from .test_cli import CLITestMixin
timeout = 480 # deep_check takes 360s on Zandr's linksys box, others take > 240s
def _test_webopen(self, args, expected_url):
o = runner.Options()
- o.parseOptions(["--node-directory", self.get_clientdir(), "webopen"]
+ o.parseOptions(["--node-directory", unicode_to_argv(self.get_clientdir()), "webopen"]
+ list(args))
urls = []
rc = cli.webopen(o, urls.append)
def test_create(self):
self.basedir = "cli/CreateAlias/create"
self.set_up_grid()
- aliasfile = os.path.join(self.get_clientdir(), "private", "aliases")
+ aliasfile = os.path.join(self.get_clientdir(), u"private", u"aliases")
d = self.do_cli("create-alias", "tahoe")
def _done((rc,stdout,stderr)):
def _stash_urls(res):
aliases = get_aliases(self.get_clientdir())
- node_url_file = os.path.join(self.get_clientdir(), "node.url")
+ node_url_file = os.path.join(self.get_clientdir(), u"node.url")
nodeurl = fileutil.read(node_url_file).strip()
self.welcome_url = nodeurl
uribase = nodeurl + "uri/"
-import os, sys
+import os
from twisted.trial import unittest
from twisted.application import service
fileutil.write(os.path.join(basedir, "debug_discard_storage"), "")
e = self.failUnlessRaises(OldConfigError, client.Client, basedir)
- abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir)).encode(sys.getfilesystemencoding())
+ abs_basedir = fileutil.abspath_expanduser_unicode(unicode(basedir))
self.failUnlessIn(os.path.join(abs_basedir, "introducer.furl"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "no_storage"), e.args[0])
self.failUnlessIn(os.path.join(abs_basedir, "readonly_storage"), e.args[0])
from twisted.trial import unittest
from twisted.internet import defer
from twisted.internet import threads # CLI tests use deferToThread
+
+from allmydata.util.assertutil import precondition
+
from allmydata.immutable import upload
from allmydata.mutable.common import UnrecoverableFileError
from allmydata.mutable.publish import MutableData
from allmydata.util import idlib
from allmydata.util import base32
+from allmydata.util.encodingutil import unicode_to_argv
from allmydata.scripts import runner
from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
IDeepCheckResults, IDeepCheckAndRepairResults
class MutableChecker(GridTestMixin, unittest.TestCase, ErrorMixin):
def _run_cli(self, argv):
+ precondition(argv[0] == "debug", argv)
+
stdout, stderr = StringIO(), StringIO()
# this can only do synchronous operations
- assert argv[0] == "debug"
runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
return stdout.getvalue()
return d
def _run_cli(self, argv, stdin=""):
+ # runner.runner will also check this, but in another thread; this gives a better traceback
+ for arg in argv:
+ precondition(isinstance(arg, str), argv)
+
#print "CLI:", argv
stdout, stderr = StringIO(), StringIO()
d = threads.deferToThread(runner.runner, argv, run_by_human=False,
def do_cli_manifest_stream1(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"manifest",
self.root_uri])
def _check((out,err)):
def do_cli_manifest_stream2(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"manifest",
"--raw",
self.root_uri])
def do_cli_manifest_stream3(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"manifest",
"--storage-index",
self.root_uri])
def do_cli_manifest_stream4(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"manifest",
"--verify-cap",
self.root_uri])
def do_cli_manifest_stream5(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"manifest",
"--repair-cap",
self.root_uri])
def do_cli_stats1(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"stats",
self.root_uri])
def _check3((out,err)):
def do_cli_stats2(self):
basedir = self.get_clientdir(0)
- d = self._run_cli(["--node-directory", basedir,
+ d = self._run_cli(["--node-directory", unicode_to_argv(basedir),
"stats",
"--raw",
self.root_uri])
return d
def _run_cli(self, argv):
+ precondition(argv[0] == "debug", argv)
+
stdout, stderr = StringIO(), StringIO()
# this can only do synchronous operations
- assert argv[0] == "debug"
runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
return stdout.getvalue()
def _corrupt_some_shares(self, node):
for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
if shnum in (0,1):
- self._run_cli(["debug", "corrupt-share", sharefile])
+ self._run_cli(["debug", "corrupt-share", unicode_to_argv(sharefile)])
def _delete_most_shares(self, node):
self.delete_shares_numbered(node.get_uri(), range(1,10))
fso = debug.FindSharesOptions()
storage_index = base32.b2a(n.get_storage_index())
fso.si_s = storage_index
- fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(unicode(storedir)))
+ fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(storedir))
for (i,ss,storedir)
in self.iterate_servers()]
fso.stdout = StringIO()
from allmydata.immutable.upload import Data
from allmydata.util.consumer import download_to_data
+
class Harness(unittest.TestCase):
def setUp(self):
self.s = service.MultiService()
return self.s.stopService()
def test_create(self):
- basedir = "no_network/Harness/create"
+ basedir = u"no_network/Harness/create"
g = NoNetworkGrid(basedir)
g.startService()
return g.stopService()
def test_upload(self):
- basedir = "no_network/Harness/upload"
+ basedir = u"no_network/Harness/upload"
g = NoNetworkGrid(basedir)
g.setServiceParent(self.s)
def workdir(self, name):
- basedir = os.path.join("storage", "MutableServer", name)
+ basedir = os.path.join("storage", "MDMFProxies", name)
return basedir
return self.sparent.stopService()
def workdir(self, name):
- basedir = os.path.join("storage", "Server", name)
+ basedir = os.path.join("storage", "Stats", name)
return basedir
def create(self, name):
from allmydata.util import idlib, mathutil
from allmydata.util import log, base32
from allmydata.util.verlib import NormalizedVersion
-from allmydata.util.encodingutil import quote_output, unicode_to_argv, get_filesystem_encoding
+from allmydata.util.encodingutil import quote_local_unicode_path, \
+ unicode_to_argv, get_filesystem_encoding
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.consumer import MemoryConsumer, download_to_data
from allmydata.scripts import runner
d.addCallback(self.log, "starting publish")
d.addCallback(self._do_publish1)
d.addCallback(self._test_runner)
d.addCallback(self._do_publish2)
# at this point, we have the following filesystem (where "R" denotes
# self._root_directory_uri):
# find a share
for (dirpath, dirnames, filenames) in os.walk(unicode(self.basedir)):
- if "storage" not in dirpath:
+ if u"storage" not in dirpath:
continue
if not filenames:
continue
pieces = dirpath.split(os.sep)
if (len(pieces) >= 4
- and pieces[-4] == "storage"
- and pieces[-3] == "shares"):
+ and pieces[-4] == u"storage"
+ and pieces[-3] == u"shares"):
# we're sitting in .../storage/shares/$START/$SINDEX , and there
# are sharefiles here
filename = os.path.join(dirpath, filenames[0])
# we only upload a single file, so we can assert some things about
# its size and shares.
- self.failUnlessIn("share filename: %s" % quote_output(abspath_expanduser_unicode(filename)), output)
+ self.failUnlessIn("share filename: %s" % quote_local_unicode_path(abspath_expanduser_unicode(filename)), output)
self.failUnlessIn("size: %d\n" % len(self.data), output)
self.failUnlessIn("num_segments: 1\n", output)
# segment_size is always a multiple of needed_shares
out,err = StringIO(), StringIO()
nodedirs = [self.getdir("client%d" % i) for i in range(self.numclients)]
cmd = ["debug", "catalog-shares"] + nodedirs
rc = runner.runner(cmd, stdout=out, stderr=err)
self.failUnlessEqual(rc, 0)
out.seek(0)
descriptions = [sfn.strip() for sfn in out.readlines()]
- self.failUnlessEqual(len(descriptions), 30)
+ self.failUnlessEqual(len(descriptions), 30, descriptions)
matching = [line
for line in descriptions
if line.startswith("CHK %s " % storage_index_s)]
f = mathutil.round_sigfigs
self.failUnlessEqual(f(22.0/3, 4), 7.3330000000000002)
+
class Statistics(unittest.TestCase):
def should_assert(self, msg, func, *args, **kwargs):
try:
m = self.should_assert(f, False, othermsg="message2")
self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)
+
class FileUtil(ReallyEqualMixin, unittest.TestCase):
def mkdir(self, basedir, path, mode=0777):
fn = os.path.join(basedir, path)
disk = fileutil.get_disk_stats(u".", 2**128)
self.failUnlessEqual(disk['avail'], 0)
+
class PollMixinTests(unittest.TestCase):
def setUp(self):
self.pm = pollmixin.PollMixin()
d.addCallbacks(_suc, _err)
return d
+
class DeferredUtilTests(unittest.TestCase):
def test_gather_results(self):
d1 = defer.Deferred()
self.failUnless(isinstance(f, Failure))
self.failUnless(f.check(ValueError))
+
class HashUtilTests(unittest.TestCase):
def test_random_key(self):
k = hashutil.random_key()
self.failUnlessEqual(len(k), hashutil.KEYLEN)
e = self.failUnlessRaises(ValueError, p, "fhtagn")
self.failUnlessIn("fhtagn", str(e))
+
class Limiter(unittest.TestCase):
timeout = 480 # This takes longer than 240 seconds on Francois's arm box.
d.addCallback(_all_done)
return d
+
class TimeFormat(unittest.TestCase):
def test_epoch(self):
return self._help_test_epoch()
def test_parse_date(self):
self.failUnlessEqual(time_format.parse_date("2010-02-21"), 1266710400)
+
class CacheDir(unittest.TestCase):
def test_basic(self):
basedir = "test_util/CacheDir/test_basic"
_failUnlessExists("c")
del b2
+
ctr = [0]
class EqButNotIs:
def __init__(self, x):
def __eq__(self, other):
return self.x == other
+
class DictUtil(unittest.TestCase):
def _help_test_empty_dict(self, klass):
d1 = klass()
self.failUnlessEqual(d["one"], 1)
self.failUnlessEqual(d.get_aux("one"), None)
+
class Pipeline(unittest.TestCase):
def pause(self, *args, **kwargs):
d = defer.Deferred()
del d1,d2,d3,d4
+
class SampleError(Exception):
pass
+
class Log(unittest.TestCase):
def test_err(self):
if not hasattr(self, "flushLoggedErrors"):
return False
return True
+
class ByteSpans(unittest.TestCase):
def test_basic(self):
s = Spans()
out.write(" ")
out.write("\n")
+
def extend(s, start, length, fill):
if len(s) >= start+length:
return s