# do not import any allmydata modules at this level. Do that from inside
# individual functions instead.
-import struct, time, os
+import struct, time, os, sys
from twisted.python import usage, failure
from twisted.internet import defer
+from twisted.scripts import trial as twisted_trial
class DumpOptions(usage.Options):
from allmydata.util.encodingutil import quote_output, to_str
# use a ReadBucketProxy to parse the bucket and find the uri extension
- bp = ReadBucketProxy(None, '', '')
+ bp = ReadBucketProxy(None, None, '')
offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
print >>out, "%20s: %d" % ("version", bp._version)
seek = offsets['uri_extension']
print >>out, "Literal File URI:"
print >>out, " data:", quote_output(u.data)
- elif isinstance(u, uri.WriteableSSKFileURI):
+ elif isinstance(u, uri.WriteableSSKFileURI): # SDMF
if show_header:
- print >>out, "SSK Writeable URI:"
+ print >>out, "SDMF Writeable URI:"
print >>out, " writekey:", base32.b2a(u.writekey)
print >>out, " readkey:", base32.b2a(u.readkey)
print >>out, " storage index:", si_b2a(u.get_storage_index())
print >>out, " write_enabler:", base32.b2a(we)
print >>out
_dump_secrets(u.get_storage_index(), secret, nodeid, out)
-
elif isinstance(u, uri.ReadonlySSKFileURI):
if show_header:
- print >>out, "SSK Read-only URI:"
+ print >>out, "SDMF Read-only URI:"
print >>out, " readkey:", base32.b2a(u.readkey)
print >>out, " storage index:", si_b2a(u.get_storage_index())
print >>out, " fingerprint:", base32.b2a(u.fingerprint)
elif isinstance(u, uri.SSKVerifierURI):
if show_header:
- print >>out, "SSK Verifier URI:"
+ print >>out, "SDMF Verifier URI:"
print >>out, " storage index:", si_b2a(u.get_storage_index())
print >>out, " fingerprint:", base32.b2a(u.fingerprint)
- elif isinstance(u, uri.DirectoryURI):
+ elif isinstance(u, uri.WriteableMDMFFileURI): # MDMF
+ if show_header:
+ print >>out, "MDMF Writeable URI:"
+ print >>out, " writekey:", base32.b2a(u.writekey)
+ print >>out, " readkey:", base32.b2a(u.readkey)
+ print >>out, " storage index:", si_b2a(u.get_storage_index())
+ print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+ print >>out
+ if nodeid:
+ we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
+ print >>out, " write_enabler:", base32.b2a(we)
+ print >>out
+ _dump_secrets(u.get_storage_index(), secret, nodeid, out)
+ elif isinstance(u, uri.ReadonlyMDMFFileURI):
+ if show_header:
+ print >>out, "MDMF Read-only URI:"
+ print >>out, " readkey:", base32.b2a(u.readkey)
+ print >>out, " storage index:", si_b2a(u.get_storage_index())
+ print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+ elif isinstance(u, uri.MDMFVerifierURI):
+ if show_header:
+ print >>out, "MDMF Verifier URI:"
+ print >>out, " storage index:", si_b2a(u.get_storage_index())
+ print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+
+
+ elif isinstance(u, uri.ImmutableDirectoryURI): # CHK-based directory
+ if show_header:
+ print >>out, "CHK Directory URI:"
+ dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
+ elif isinstance(u, uri.ImmutableDirectoryURIVerifier):
+ if show_header:
+ print >>out, "CHK Directory Verifier URI:"
+ dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
+
+ elif isinstance(u, uri.DirectoryURI): # SDMF-based directory
if show_header:
print >>out, "Directory Writeable URI:"
dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
if show_header:
print >>out, "Directory Verifier URI:"
dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
+
+ elif isinstance(u, uri.MDMFDirectoryURI): # MDMF-based directory
+ if show_header:
+ print >>out, "Directory Writeable URI:"
+ dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
+ elif isinstance(u, uri.ReadonlyMDMFDirectoryURI):
+ if show_header:
+ print >>out, "Directory Read-only URI:"
+ dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
+ elif isinstance(u, uri.MDMFDirectoryURIVerifier):
+ if show_header:
+ print >>out, "Directory Verifier URI:"
+ dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
+
else:
print >>out, "unknown cap type"
class ImmediateReadBucketProxy(ReadBucketProxy):
+    # A ReadBucketProxy specialized for local share inspection: reads
+    # are satisfied synchronously from the share file held in self.sf
+    # instead of going over a remote reference.
    def __init__(self, sf):
        self.sf = sf
-        ReadBucketProxy.__init__(self, "", "", "")
+        ReadBucketProxy.__init__(self, None, None, "")
+        # NOTE(review): the first two arguments (rref, server) changed
+        # from "" to None — presumably they are unused when all reads
+        # are local; confirm against ReadBucketProxy.__init__'s
+        # signature.
    def __repr__(self):
        return "<ImmediateReadBucketProxy>"
def _read(self, offset, size):
# ignore nodes that have storage turned off altogether
pass
else:
- for abbrevdir in abbrevs:
+ for abbrevdir in sorted(abbrevs):
if abbrevdir == "incoming":
continue
abbrevdir = os.path.join(d, abbrevdir)
# as possible.
try:
sharedirs = listdir_unicode(abbrevdir)
- for si_s in sharedirs:
+ for si_s in sorted(sharedirs):
si_dir = os.path.join(abbrevdir, si_s)
catalog_shares_one_abbrevdir(si_s, si_dir, now, out,err)
except:
return 0
+def _as_number(s):
+    """Sort key for share-number directory names.
+
+    Returns int(s) when the name parses as an integer, otherwise the
+    literal string "not int".  Under Python 2 (this file uses
+    'print >>out'), ints order before strings, so numeric share
+    numbers sort first and malformed names sink to the end.
+    """
+    try:
+        return int(s)
+    except ValueError:
+        return "not int"
+
def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
from allmydata.util.encodingutil import listdir_unicode, quote_output
try:
- for shnum_s in listdir_unicode(si_dir):
+ for shnum_s in sorted(listdir_unicode(si_dir), key=_as_number):
abs_sharefile = os.path.join(si_dir, shnum_s)
assert os.path.isfile(abs_sharefile)
try:
else:
# otherwise assume it's immutable
f = ShareFile(fn)
- bp = ReadBucketProxy(None, '', '')
+ bp = ReadBucketProxy(None, None, '')
offsets = bp._parse_offsets(f.read_share_data(0, 0x24))
start = f._data_offset + offsets["data"]
end = f._data_offset + offsets["plaintext_hash_tree"]
return code.interact()
+# Test suite run by 'tahoe debug trial' when no suite is named on the
+# command line.
+DEFAULT_TESTSUITE = 'allmydata'
+
+class TrialOptions(twisted_trial.Options):
+    """Option parser for 'tahoe debug trial'.
+
+    Delegates parsing to Twisted trial's own Options, but also records
+    the raw argument list (self.trial_args) so trial() can replay the
+    exact command line via sys.argv.
+    """
+    def getSynopsis(self):
+        return "Usage: tahoe debug trial [options] [[file|package|module|TestCase|testmethod]...]"
+
+    def parseOptions(self, all_subargs, *a, **kw):
+        # Capture the full, unparsed argument list before delegating;
+        # trial() hands it to Twisted trial unchanged.
+        self.trial_args = list(all_subargs)
+
+        # any output from the option parsing will be printed twice, but that's harmless
+        return twisted_trial.Options.parseOptions(self, all_subargs, *a, **kw)
+
+    def parseArgs(self, *nonoption_args):
+        # No positional args given: fall back to the default suite.
+        if not nonoption_args:
+            self.trial_args.append(DEFAULT_TESTSUITE)
+
+    def getUsage(self, width=None):
+        # Append a note about this wrapper to Twisted trial's own
+        # usage text.
+        t = twisted_trial.Options.getUsage(self, width)
+        t += """
+The 'tahoe debug trial' command uses the correct imports for this instance of
+Tahoe-LAFS. The default test suite is '%s'.
+""" % (DEFAULT_TESTSUITE,)
+        return t
+
+def trial(config):
+    """Run Twisted Trial in-process on config.trial_args.
+
+    sys.argv is replaced wholesale so trial sees exactly the arguments
+    the user typed after 'tahoe debug trial' (as recorded by
+    TrialOptions.parseOptions).
+    """
+    sys.argv = ['trial'] + config.trial_args
+
+    # This does not return.
+    twisted_trial.run()
+
+
class DebugCommand(usage.Options):
subCommands = [
["dump-share", None, DumpOptions,
["catalog-shares", None, CatalogSharesOptions, "Describe all shares in node dirs."],
["corrupt-share", None, CorruptShareOptions, "Corrupt a share by flipping a bit."],
["repl", None, ReplOptions, "Open a Python interpreter."],
+ ["trial", None, TrialOptions, "Run tests using Twisted Trial with the right imports."],
]
def postOptions(self):
if not hasattr(self, 'subOptions'):
tahoe debug catalog-shares Describe all shares in node dirs.
tahoe debug corrupt-share Corrupt a share by flipping a bit.
tahoe debug repl Open a Python interpreter.
+ tahoe debug trial Run tests using Twisted Trial with the right imports.
Please run e.g. 'tahoe debug dump-share --help' for more details on each
subcommand.
"""
+ # See ticket #1441 for why we print different information when
+ # run via /usr/bin/tahoe. Note that argv[0] is the full path.
+ if sys.argv[0] == '/usr/bin/tahoe':
+ t += """
+To get branch coverage for the Tahoe test suite (on the installed copy of
+Tahoe), install the 'python-coverage' package and then use:
+
+ python-coverage run --branch /usr/bin/tahoe debug trial
+"""
+ else:
+ t += """
+Another debugging feature is that bin%stahoe allows executing an arbitrary
+"runner" command (typically an installed Python script, such as 'coverage'),
+with the Tahoe libraries on the PYTHONPATH. The runner command name is
+prefixed with '@', and any occurrences of '@tahoe' in its arguments are
+replaced by the full path to the tahoe script.
+
+For example, if 'coverage' is installed and on the PATH, you can use:
+
+ bin%stahoe @coverage run --branch @tahoe debug trial
+
+to get branch coverage for the Tahoe test suite. Or, to run python with
+the -3 option that warns about Python 3 incompatibilities:
+
+ bin%stahoe @python -3 @tahoe command [options]
+""" % (os.sep, os.sep, os.sep)
return t
subDispatch = {
"catalog-shares": catalog_shares,
"corrupt-share": corrupt_share,
"repl": repl,
+ "trial": trial,
}