Straw-man Python 3 support.  (branch: 2065-python3-strawman)
author     Daira Hopwood <daira@jacaranda.org>
           Fri, 30 Aug 2013 03:06:23 +0000 (04:06 +0100)
committer  Daira Hopwood <daira@jacaranda.org>
           Fri, 30 Aug 2013 03:06:23 +0000 (04:06 +0100)
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
149 files changed:
.gitignore
bin/tahoe-script.template
misc/build_helpers/build-deb.py
misc/build_helpers/check-build.py
misc/build_helpers/gen-package-table.py
misc/build_helpers/get-version.py
misc/build_helpers/pyver.py
misc/build_helpers/run-with-pythonpath.py
misc/build_helpers/run_trial.py
misc/build_helpers/show-tool-versions.py
misc/build_helpers/sub-ver.py
misc/build_helpers/test-darcs-boringfile.py
misc/build_helpers/test-git-ignore.py
misc/coding_tools/check-interfaces.py
misc/coding_tools/check-miscaptures.py
misc/coding_tools/check-umids.py
misc/coding_tools/find-trailing-spaces.py
misc/coding_tools/make-canary-files.py
misc/operations_helpers/cpu-watcher-poll.py
misc/operations_helpers/cpu-watcher-subscribe.py
misc/operations_helpers/cpu-watcher.tac
misc/operations_helpers/find-share-anomalies.py
misc/operations_helpers/getmem.py
misc/operations_helpers/provisioning/provisioning.py
misc/simulators/bench_spans.py
misc/simulators/count_dirs.py
misc/simulators/hashbasedsig.py
misc/simulators/ringsim.py
misc/simulators/simulate_load.py
misc/simulators/simulator.py
misc/simulators/sizes.py
misc/simulators/storage-overhead.py
setup.py
src/allmydata/__init__.py
src/allmydata/_auto_deps.py
src/allmydata/blacklist.py
src/allmydata/client.py
src/allmydata/codec.py
src/allmydata/control.py
src/allmydata/dirnode.py
src/allmydata/frontends/auth.py
src/allmydata/frontends/ftpd.py
src/allmydata/frontends/sftpd.py
src/allmydata/hashtree.py
src/allmydata/immutable/checker.py
src/allmydata/immutable/downloader/fetcher.py
src/allmydata/immutable/downloader/finder.py
src/allmydata/immutable/downloader/node.py
src/allmydata/immutable/downloader/segmentation.py
src/allmydata/immutable/downloader/share.py
src/allmydata/immutable/downloader/status.py
src/allmydata/immutable/encode.py
src/allmydata/immutable/filenode.py
src/allmydata/immutable/upload.py
src/allmydata/introducer/interfaces.py
src/allmydata/key_generator.py
src/allmydata/mutable/filenode.py
src/allmydata/mutable/layout.py
src/allmydata/mutable/publish.py
src/allmydata/mutable/retrieve.py
src/allmydata/mutable/servermap.py
src/allmydata/node.py
src/allmydata/nodemaker.py
src/allmydata/scripts/admin.py
src/allmydata/scripts/backupdb.py
src/allmydata/scripts/cli.py
src/allmydata/scripts/common.py
src/allmydata/scripts/common_http.py
src/allmydata/scripts/create_node.py
src/allmydata/scripts/debug.py
src/allmydata/scripts/keygen.py
src/allmydata/scripts/runner.py
src/allmydata/scripts/slow_operation.py
src/allmydata/scripts/startstop_node.py
src/allmydata/scripts/stats_gatherer.py
src/allmydata/scripts/tahoe_add_alias.py
src/allmydata/scripts/tahoe_backup.py
src/allmydata/scripts/tahoe_check.py
src/allmydata/scripts/tahoe_cp.py
src/allmydata/scripts/tahoe_get.py
src/allmydata/scripts/tahoe_ls.py
src/allmydata/scripts/tahoe_manifest.py
src/allmydata/scripts/tahoe_mkdir.py
src/allmydata/scripts/tahoe_mv.py
src/allmydata/scripts/tahoe_put.py
src/allmydata/scripts/tahoe_unlink.py
src/allmydata/scripts/tahoe_webopen.py
src/allmydata/stats.py
src/allmydata/storage/server.py
src/allmydata/test/bench_dirnode.py
src/allmydata/test/check_grid.py
src/allmydata/test/check_load.py
src/allmydata/test/check_memory.py
src/allmydata/test/check_speed.py
src/allmydata/test/common.py
src/allmydata/test/common_util.py
src/allmydata/test/no_network.py
src/allmydata/test/test_base62.py
src/allmydata/test/test_checker.py
src/allmydata/test/test_cli.py
src/allmydata/test/test_codec.py
src/allmydata/test/test_crawler.py
src/allmydata/test/test_deepcheck.py
src/allmydata/test/test_dirnode.py
src/allmydata/test/test_download.py
src/allmydata/test/test_encode.py
src/allmydata/test/test_encodingutil.py
src/allmydata/test/test_ftp.py
src/allmydata/test/test_hashtree.py
src/allmydata/test/test_hung_server.py
src/allmydata/test/test_immutable.py
src/allmydata/test/test_introducer.py
src/allmydata/test/test_mutable.py
src/allmydata/test/test_node.py
src/allmydata/test/test_repairer.py
src/allmydata/test/test_runner.py
src/allmydata/test/test_sftp.py
src/allmydata/test/test_storage.py
src/allmydata/test/test_system.py
src/allmydata/test/test_upload.py
src/allmydata/test/test_util.py
src/allmydata/test/test_version.py
src/allmydata/test/test_web.py
src/allmydata/test/trial_coverage.py
src/allmydata/uri.py
src/allmydata/util/abbreviate.py
src/allmydata/util/assertutil.py
src/allmydata/util/base32.py
src/allmydata/util/dictutil.py
src/allmydata/util/encodingutil.py
src/allmydata/util/fake_inotify.py
src/allmydata/util/fileutil.py
src/allmydata/util/happinessutil.py
src/allmydata/util/humanreadable.py
src/allmydata/util/iputil.py
src/allmydata/util/mathutil.py
src/allmydata/util/pollmixin.py
src/allmydata/util/sixutil.py [new file with mode: 0644]
src/allmydata/util/spans.py
src/allmydata/util/statistics.py
src/allmydata/util/time_format.py
src/allmydata/util/verlib.py
src/allmydata/web/common.py
src/allmydata/web/directory.py
src/allmydata/web/introweb.py
src/allmydata/web/operations.py
src/allmydata/web/status.py
src/allmydata/windows/fixups.py
src/allmydata/windows/registry.py
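
The diffs below consist almost entirely of a few mechanical idiom substitutions that let the same source parse under both Python 2.6+ and Python 3. As a reference key, here is a minimal sketch (not part of the commit) that exercises each substitution and runs unchanged on either major version:

    from __future__ import print_function  # must precede all other statements (docstring aside)

    import sys

    # print() as a function; file= replaces the old "print >>sys.stderr".
    print("hello", file=sys.stderr)

    # range() instead of xrange(); on Python 2 this builds a list, which
    # is fine for the small loops in these scripts.
    total = 0
    for i in range(10):
        total += i

    # A list comprehension instead of map(), whose result is not a list
    # on Python 3.
    doubled = [x * 2 for x in range(5)]

    # "except E as e" instead of the Python-2-only "except E, e".
    try:
        int("not a number")
    except ValueError as e:
        print("caught:", e)

    # 0o755 instead of 0755; the bare-zero octal form is a SyntaxError
    # on Python 3.
    mode = 0o755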

diff --git a/.gitignore b/.gitignore
index 1d261e5dd9793d91341ad5da91b0d36a0d25d213..51e4d882379ef02a7ab5ce7533ea266549518836 100644 (file)
@@ -3,6 +3,7 @@
 *~
 *.DS_Store
 .*.kate-swp
+*.py.bak
 
 /build/
 /support/
diff --git a/bin/tahoe-script.template b/bin/tahoe-script.template
index 3fb7efedf717f8af311ea39aa7bf0221e801f076..0231f1662cdd56a31755badf4e8d3d345e79a743 100644 (file)
@@ -1,7 +1,6 @@
 #!/bin/false # You must specify a python interpreter.
-import sys; assert sys.version_info < (3,), ur"Tahoe-LAFS does not run under Python 3. Please use a version of Python between 2.6 and 2.7.x inclusive."
 
-import os, subprocess
+import os, subprocess, sys
 
 where = os.path.realpath(sys.argv[0])
 base = os.path.dirname(os.path.dirname(where))
@@ -65,7 +64,7 @@ if sys.platform == "win32":
     def mangle(s):
         return str(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s))
 
-    argv = [mangle(argv_unicode[i]) for i in xrange(0, argc.value)]
+    argv = [mangle(argv_unicode[i]) for i in range(0, argc.value)]
 
     # Take only the suffix with the same number of arguments as sys.argv.
     # This accounts for anything that can cause initial arguments to be stripped,
@@ -98,7 +97,7 @@ if len(args) >= 1 and args[0].startswith('@'):
     def _subst(a):
         if a == '@tahoe': return script
         return a
-    command = prefix + [runner] + map(_subst, args[1:])
+    command = prefix + [runner] + [_subst(arg) for arg in args[1:]]
 else:
     runner = script
     command = prefix + [script] + args
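
Note that the assertion deleted above used a ur"..." string literal, which is itself a SyntaxError on Python 3 (the ur prefix was removed), so the old guard could never even parse there, let alone print its message. A hypothetical replacement guard that parses on both majors would look like:

    import sys

    # Hypothetical: a minimum-version check written so that it parses on
    # both Python 2 and 3, unlike the removed assert with its ur"..." literal.
    if sys.version_info < (2, 6):
        sys.stderr.write("Tahoe-LAFS requires Python 2.6 or later.\n")
        sys.exit(1)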
diff --git a/misc/build_helpers/build-deb.py b/misc/build_helpers/build-deb.py
index 8b87159d1d14f2af56d7d6d339fb86f325fde014..cccd8636122676437a0470e866b60346df8aa3fa 100644 (file)
@@ -1,5 +1,6 @@
 #!/bin/false # invoke this with a specific python
 
+from __future__ import print_function
 import sys, shutil, os.path
 from subprocess import Popen, PIPE
 
@@ -11,21 +12,21 @@ class SubprocessError(Exception):
 
 def get_output(*cmd, **kwargs):
     tolerate_stderr = kwargs.get("tolerate_stderr", False)
-    print " " + " ".join(cmd)
+    print(" " + " ".join(cmd))
     p = Popen(cmd, stdout=PIPE)
     (out,err) = p.communicate()
     rc = p.returncode
     if rc != 0:
-        print >>sys.stderr, err
+        print(err, file=sys.stderr)
         raise SubprocessError("command %s exited with rc=%s", (cmd, rc))
     if err and not tolerate_stderr:
-        print >>sys.stderr, "stderr:", err
+        print("stderr:", err, file=sys.stderr)
         raise SubprocessError("command emitted unexpected stderr")
-    print " =>", out,
+    print(" =>", out, end=' ')
     return out
 
 def run(*cmd, **kwargs):
-    print " " + " ".join(cmd)
+    print(" " + " ".join(cmd))
 #    if "stdin" in kwargs:
 #        stdin = kwargs.pop("stdin")
 #        p = Popen(cmd, stdin=PIPE, **kwargs)
@@ -88,7 +89,7 @@ for n in ["compat", "control", "copyright", "pycompat", "rules"]:
 
     shutil.copyfile(fn, os.path.join(DEBDIR, n))
     if n == "rules":
-        os.chmod(os.path.join(DEBDIR, n), 0755) # +x
+        os.chmod(os.path.join(DEBDIR, n), 0o755) # +x
 
 # We put "local package" on the first line of the changelog entry to suppress
 # the lintian NMU warnings (since debchange's new entry's "author" will
diff --git a/misc/build_helpers/check-build.py b/misc/build_helpers/check-build.py
index e293c6a1bea5313062f1bff2bed2b67fa11d27c7..b93c5a869539149e854ffebb1c45ef47044a6e50 100644 (file)
@@ -2,13 +2,14 @@
 
 # This helper script is used with the 'test-desert-island' Makefile target.
 
+from __future__ import print_function
 import sys
 
 good = True
 build_out = sys.argv[1]
 mode = sys.argv[2]
 
-print
+print()
 
 for line in open(build_out, "r"):
     if mode == "no-downloads":
@@ -28,13 +29,13 @@ for line in open(build_out, "r"):
         # if it has all the packages that it needs locally, but we
         # currently don't enforce that stronger requirement.
         if line.startswith("Downloading http:"):
-            print line,
+            print(line, end=' ')
             good = False
 if good:
     if mode == "no-downloads":
-        print "Good: build did not try to download any files"
+        print("Good: build did not try to download any files")
     sys.exit(0)
 else:
     if mode == "no-downloads":
-        print "Failed: build tried to download files"
+        print("Failed: build tried to download files")
     sys.exit(1)
diff --git a/misc/build_helpers/gen-package-table.py b/misc/build_helpers/gen-package-table.py
index 99a43ed02c9ca30659004978e652227177aac876..c0a1cb9f3508df761c00cf94b716227a81f44d23 100644 (file)
@@ -2,6 +2,7 @@
 # This script generates a table of dependencies in HTML format on stdout.
 # It expects to be run in the tahoe-lafs-dep-eggs directory.
 
+from __future__ import print_function
 import re, os, sys
 import pkg_resources
 
@@ -69,27 +70,27 @@ width = 100 / (len(platform_independent_pkgs) + 1)
 greybgstyle = '; background-color: #E0E0E0'
 nobgstyle = ''
 
-print '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">'
-print '<html>'
-print '<head>'
-print '  <meta http-equiv="Content-Type" content="text/html;charset=us-ascii">'
-print '  <title>Software packages that Tahoe-LAFS depends on</title>'
-print '</head>'
-print '<body>'
-print '<h2>What is this?</h2>'
-print '<p>See <a href="https://tahoe-lafs.org/trac/tahoe-lafs/browser/docs/quickstart.rst">quickstart.rst</a>, <a href="https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Installation">wiki:Installation</a>, and <a href="https://tahoe-lafs.org/trac/tahoe-lafs/wiki/CompileError">wiki:CompileError</a>.'
-print '<h2>Software packages that Tahoe-LAFS depends on</h2>'
-print
+print('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">')
+print('<html>')
+print('<head>')
+print('  <meta http-equiv="Content-Type" content="text/html;charset=us-ascii">')
+print('  <title>Software packages that Tahoe-LAFS depends on</title>')
+print('</head>')
+print('<body>')
+print('<h2>What is this?</h2>')
+print('<p>See <a href="https://tahoe-lafs.org/trac/tahoe-lafs/browser/docs/quickstart.rst">quickstart.rst</a>, <a href="https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Installation">wiki:Installation</a>, and <a href="https://tahoe-lafs.org/trac/tahoe-lafs/wiki/CompileError">wiki:CompileError</a>.')
+print('<h2>Software packages that Tahoe-LAFS depends on</h2>')
+print()
 for pyver in reversed(sorted(python_versions)):
     greybackground = False
     if pyver:
-        print '<p>Packages for Python %s that have compiled C/C++ code:</p>' % (pyver,)
-        print '<table border="1">'
-        print '  <tr>'
-        print '    <th style="background-color: #FFFFD0" width="%d%%">&nbsp;Platform&nbsp;</th>' % (width,)
+        print('<p>Packages for Python %s that have compiled C/C++ code:</p>' % (pyver,))
+        print('<table border="1">')
+        print('  <tr>')
+        print('    <th style="background-color: #FFFFD0" width="%d%%">&nbsp;Platform&nbsp;</th>' % (width,))
         for pkg in sorted(platform_dependent_pkgs):
-            print '    <th style="background-color:#FFE8FF;" width="%d%%">&nbsp;%s&nbsp;</th>' % (width, pkg)
-        print '  </tr>'
+            print('    <th style="background-color:#FFE8FF;" width="%d%%">&nbsp;%s&nbsp;</th>' % (width, pkg))
+        print('  </tr>')
 
         first = True
         for platform in sorted(matrix[pyver]):
@@ -103,42 +104,42 @@ for pyver in reversed(sorted(python_versions)):
             style1 += bgstyle
             style2 = first and 'border-top: 2px solid #000000' or ''
             style2 += bgstyle
-            print '  <tr>'
-            print '    <td style="%s">&nbsp;%s&nbsp;</td>' % (style1, platform,)
+            print('  <tr>')
+            print('    <td style="%s">&nbsp;%s&nbsp;</td>' % (style1, platform,))
             for pkg in sorted(platform_dependent_pkgs):
                 files = [n for (p, n) in row_files if pkg == p]
                 bestfile = files and max([(pkg_resources.parse_version(x), x) for x in files])[1] or None
                 if pkg == 'pywin32' and not platform.startswith('windows'):
-                    print '    <td style="border: 0; text-align: center; %s"> n/a </td>' % (style2,)
+                    print('    <td style="border: 0; text-align: center; %s"> n/a </td>' % (style2,))
                 else:
-                    print '    <td style="%s">&nbsp;%s</td>' % (style2,
-                            bestfile and '<a href="%s">%s</a>' % (bestfile, bestfile) or '')
-            print '  </tr>'
+                    print('    <td style="%s">&nbsp;%s</td>' % (style2,
+                            bestfile and '<a href="%s">%s</a>' % (bestfile, bestfile) or ''))
+            print('  </tr>')
             first = False
 
-    print '</table>'
-    print
+    print('</table>')
+    print()
 
-print '<p>Packages that are platform-independent or source-only:</p>'
-print '<table border="1">'
-print '  <tr>'
-print '    <th style="background-color:#FFFFD0;">&nbsp;Package&nbsp;</th>'
-print '    <th style="background-color:#FFE8FF;">&nbsp;All Python versions&nbsp;</th>'
-print '  </tr>'
+print('<p>Packages that are platform-independent or source-only:</p>')
+print('<table border="1">')
+print('  <tr>')
+print('    <th style="background-color:#FFFFD0;">&nbsp;Package&nbsp;</th>')
+print('    <th style="background-color:#FFE8FF;">&nbsp;All Python versions&nbsp;</th>')
+print('  </tr>')
 
 style1 = 'border-top: 2px solid #000000; background-color:#FFFFF0;'
 style2 = 'border-top: 2px solid #000000;'
 m = matrix['']['']
 for pkg in sorted(platform_independent_pkgs):
-    print '  <tr>'
-    print '    <th style="%s">&nbsp;%s&nbsp;</th>' % (style1, pkg)
+    print('  <tr>')
+    print('    <th style="%s">&nbsp;%s&nbsp;</th>' % (style1, pkg))
     files = [n for (p, n) in m if pkg == p]
-    print '    <td style="%s">&nbsp;%s</td>' % (style2, '<br>&nbsp;'.join(['<a href="%s">%s</a>' % (f, f) for f in files]))
-    print '  </tr>'
+    print('    <td style="%s">&nbsp;%s</td>' % (style2, '<br>&nbsp;'.join(['<a href="%s">%s</a>' % (f, f) for f in files])))
+    print('  </tr>')
 
-print '</table>'
+print('</table>')
 
 # The document does validate, but not when it is included at the bottom of a directory listing.
 #print '<hr>'
 #print '<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>'
-print '</body></html>'
+print('</body></html>')
diff --git a/misc/build_helpers/get-version.py b/misc/build_helpers/get-version.py
index a3ef5d11ae388f2152ed6f6391bc6c9338af4412..829911c96c288e797f16e771ddc4f6a4fc85750e 100644 (file)
@@ -16,6 +16,7 @@ the version available in the code image when you do:
 
 """
 
+from __future__ import print_function
 import os.path, re
 
 def get_version():
@@ -34,5 +35,5 @@ def get_version():
 
 if __name__ == '__main__':
     verstr = get_version()
-    print verstr
+    print(verstr)
 
diff --git a/misc/build_helpers/pyver.py b/misc/build_helpers/pyver.py
index d53db165e71e6b2a8d7af081f580d0aaecfdfd66..4b1672e9bbb33bcd315902e989e5e7acfd51c9e8 100644 (file)
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 import sys
-print "python%d.%d" % (sys.version_info[:2])
+print("python%d.%d" % (sys.version_info[:2]))
diff --git a/misc/build_helpers/run-with-pythonpath.py b/misc/build_helpers/run-with-pythonpath.py
index fd2fbe6fe51016d63da99a50b5dd78f9367ca780..bff3547f32b6d5214d16d8684fa8e94312170e39 100644 (file)
@@ -7,6 +7,7 @@ code, including dependent libraries. Run this like:
  python misc/build_helpers/run-with-pythonpath.py python foo.py
 """
 
+from __future__ import print_function
 import os, sys
 
 # figure out where support/lib/pythonX.X/site-packages is
@@ -36,7 +37,7 @@ cmd = sys.argv[1]
 if cmd and cmd[0] not in "/~.":
     cmds = which(cmd)
     if not cmds:
-        print >>sys.stderr, "'%s' not found on PATH" % (cmd,)
+        print("'%s' not found on PATH" % (cmd,), file=sys.stderr)
         sys.exit(-1)
     cmd = cmds[0]
 
diff --git a/misc/build_helpers/run_trial.py b/misc/build_helpers/run_trial.py
index e398a7a7b3ecf23cd2c9e088f0f2d73e41290ac2..1a38dfae22c69aa2a8c792887befc4b6b0635a64 100644 (file)
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 import os, sys, re, glob
 
 
@@ -22,7 +23,7 @@ if version is None:
 APPNAME='allmydata-tahoe'
 
 adglobals = {}
-execfile(os.path.join('..', 'src', 'allmydata', '_auto_deps.py'), adglobals)
+exec(compile(open(os.path.join('..', 'src', 'allmydata', '_auto_deps.py')).read(), os.path.join('..', 'src', 'allmydata', '_auto_deps.py'), 'exec'), adglobals)
 install_requires = adglobals['install_requires']
 test_requires = adglobals.get('test_requires', ['mock'])
 
@@ -31,19 +32,19 @@ test_requires = adglobals.get('test_requires', ['mock'])
 
 __requires__ = [APPNAME + '==' + version] + install_requires + test_requires
 
-print "Requirements: %r" % (__requires__,)
+print("Requirements: %r" % (__requires__,))
 
 eggz = glob.glob(os.path.join('..', 'setuptools-*.egg'))
 if len(eggz) > 0:
    egg = os.path.realpath(eggz[0])
-   print "Inserting egg on sys.path: %r" % (egg,)
+   print("Inserting egg on sys.path: %r" % (egg,))
    sys.path.insert(0, egg)
 
 import pkg_resources
 pkg_resources  # hush pyflakes
 
 modulename = None
-for i in xrange(1, len(sys.argv)):
+for i in range(1, len(sys.argv)):
     if not sys.argv[i].startswith('-'):
         modulename = sys.argv[i]
         break
@@ -85,7 +86,7 @@ same = (root_from_cwd == rootdir)
 if not same:
     try:
         same = os.path.samefile(root_from_cwd, rootdir)
-    except AttributeError, e:
+    except AttributeError as e:
         e  # hush pyflakes
 
 if not same:
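
Python 3 removes execfile(), and the hunk above inlines the standard replacement. Passing the path to compile() preserves the real filename, so tracebacks still point into _auto_deps.py. A small helper equivalent to the inlined expression (exec_file is a hypothetical name, not part of the commit):

    def exec_file(path, globals_dict):
        # Equivalent to Python 2's execfile(path, globals_dict): compile
        # with the real filename so tracebacks reference that file.
        with open(path) as f:
            source = f.read()
        exec(compile(source, path, 'exec'), globals_dict)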
diff --git a/misc/build_helpers/show-tool-versions.py b/misc/build_helpers/show-tool-versions.py
index 44584c96589f794163ba908f58f00b1be93257ed..0c13a03f3c87f42a48018d57ecb506c62b88e9ed 100644 (file)
@@ -1,5 +1,6 @@
 #! /usr/bin/env python
 
+from __future__ import print_function
 import locale, os, platform, subprocess, sys, traceback
 
 added_zetuptoolz_egg = False
@@ -11,7 +12,7 @@ except ImportError:
     eggz = glob.glob('setuptools-*.egg')
     if len(eggz) > 0:
         egg = os.path.realpath(eggz[0])
-        print >>sys.stderr, "Inserting egg on sys.path: %r" % (egg,)
+        print("Inserting egg on sys.path: %r" % (egg,), file=sys.stderr)
         added_zetuptoolz_egg = True
         sys.path.insert(0, egg)
 
@@ -25,10 +26,10 @@ def print_platform():
     try:
         import platform
         out = platform.platform()
-        print "platform:", foldlines(out)
-        print "machine: ", platform.machine()
+        print("platform:", foldlines(out))
+        print("machine: ", platform.machine())
         if hasattr(platform, 'linux_distribution'):
-            print "linux_distribution:", repr(platform.linux_distribution())
+            print("linux_distribution:", repr(platform.linux_distribution()))
     except EnvironmentError:
         sys.stderr.write("\nGot exception using 'platform'. Exception follows\n")
         traceback.print_exc(file=sys.stderr)
@@ -36,17 +37,17 @@ def print_platform():
         pass
 
 def print_python_ver():
-    print "python:", foldlines(sys.version)
-    print 'maxunicode: ' + str(sys.maxunicode)
+    print("python:", foldlines(sys.version))
+    print('maxunicode: ' + str(sys.maxunicode))
 
 def print_python_encoding_settings():
-    print 'filesystem.encoding: ' + str(sys.getfilesystemencoding())
-    print 'locale.getpreferredencoding: ' + str(locale.getpreferredencoding())
+    print('filesystem.encoding: ' + str(sys.getfilesystemencoding()))
+    print('locale.getpreferredencoding: ' + str(locale.getpreferredencoding()))
     try:
-        print 'locale.defaultlocale: ' + str(locale.getdefaultlocale())
-    except ValueError, e:
-        print 'got exception from locale.getdefaultlocale(): ', e
-    print 'locale.locale: ' + str(locale.getlocale())
+        print('locale.defaultlocale: ' + str(locale.getdefaultlocale()))
+    except ValueError as e:
+        print('got exception from locale.getdefaultlocale(): ', e)
+    print('locale.locale: ' + str(locale.getlocale()))
 
 def print_stdout(cmdlist, label=None, numlines=None):
     try:
@@ -54,10 +55,10 @@ def print_stdout(cmdlist, label=None, numlines=None):
             label = cmdlist[0]
         res = subprocess.Popen(cmdlist, stdin=open(os.devnull),
                                stdout=subprocess.PIPE).communicate()[0]
-        print label + ': ' + foldlines(res, numlines)
-    except EnvironmentError, e:
+        print(label + ': ' + foldlines(res, numlines))
+    except EnvironmentError as e:
         if isinstance(e, OSError) and e.errno == 2:
-            print label + ': no such file or directory'
+            print(label + ': no such file or directory')
             return
         sys.stderr.write("\nGot exception invoking '%s'. Exception follows.\n" % (cmdlist[0],))
         traceback.print_exc(file=sys.stderr)
@@ -66,12 +67,12 @@ def print_stdout(cmdlist, label=None, numlines=None):
 
 def print_as_ver():
     if os.path.exists('a.out'):
-        print "WARNING: a file named a.out exists, and getting the version of the 'as' assembler writes to that filename, so I'm not attempting to get the version of 'as'."
+        print("WARNING: a file named a.out exists, and getting the version of the 'as' assembler writes to that filename, so I'm not attempting to get the version of 'as'.")
         return
     try:
         res = subprocess.Popen(['as', '-version'], stdin=open(os.devnull),
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
-        print 'as: ' + foldlines(res[0]+' '+res[1])
+        print('as: ' + foldlines(res[0]+' '+res[1]))
         if os.path.exists('a.out'):
             os.remove('a.out')
     except EnvironmentError:
@@ -83,36 +84,36 @@ def print_as_ver():
 def print_setuptools_ver():
     if added_zetuptoolz_egg:
         # it would be misleading to report the bundled version of zetuptoolz as the installed version
-        print "setuptools: using bundled egg"
+        print("setuptools: using bundled egg")
         return
     try:
         import pkg_resources
         out = str(pkg_resources.require("setuptools"))
-        print "setuptools:", foldlines(out)
+        print("setuptools:", foldlines(out))
     except (ImportError, EnvironmentError):
         sys.stderr.write("\nGot exception using 'pkg_resources' to get the version of setuptools. Exception follows\n")
         traceback.print_exc(file=sys.stderr)
         sys.stderr.flush()
         pass
     except pkg_resources.DistributionNotFound:
-        print 'setuptools: DistributionNotFound'
+        print('setuptools: DistributionNotFound')
         pass
 
 def print_py_pkg_ver(pkgname, modulename=None):
     if modulename is None:
         modulename = pkgname
-    print
+    print()
     try:
         import pkg_resources
         out = str(pkg_resources.require(pkgname))
-        print pkgname + ': ' + foldlines(out)
+        print(pkgname + ': ' + foldlines(out))
     except (ImportError, EnvironmentError):
         sys.stderr.write("\nGot exception using 'pkg_resources' to get the version of %s. Exception follows.\n" % (pkgname,))
         traceback.print_exc(file=sys.stderr)
         sys.stderr.flush()
         pass
     except pkg_resources.DistributionNotFound:
-        print pkgname + ': DistributionNotFound'
+        print(pkgname + ': DistributionNotFound')
         pass
     try:
         __import__(modulename)
@@ -120,19 +121,19 @@ def print_py_pkg_ver(pkgname, modulename=None):
         pass
     else:
         modobj = sys.modules.get(modulename)
-        print pkgname + ' module: ' + str(modobj)
+        print(pkgname + ' module: ' + str(modobj))
         try:
-            print pkgname + ' __version__: ' + str(modobj.__version__)
+            print(pkgname + ' __version__: ' + str(modobj.__version__))
         except AttributeError:
             pass
 
 print_platform()
-print
+print()
 print_python_ver()
-print
+print()
 print_stdout(['locale'])
 print_python_encoding_settings()
-print
+print()
 print_stdout(['buildbot', '--version'])
 print_stdout(['buildslave', '--version'])
 if 'windows' in platform.system().lower():
diff --git a/misc/build_helpers/sub-ver.py b/misc/build_helpers/sub-ver.py
index 6c1fbbad2be8b0226718111c3bf0ace38f1e551c..14cbc1a28d70cddd99cd02fc61634fae32dc5d2a 100644 (file)
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 from allmydata import __version__ as v
 
 import sys
@@ -22,5 +23,5 @@ vern = {
     }
 
 for line in input.readlines():
-    print line % vern,
+    print(line % vern, end=' ')
 
diff --git a/misc/build_helpers/test-darcs-boringfile.py b/misc/build_helpers/test-darcs-boringfile.py
index 619a4e448c93db4d4b820fb7f33f009dddb46162..7e42ea7beeeb08202b876bd0f750b5fda239159e 100644 (file)
@@ -1,12 +1,13 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 import sys
 from subprocess import Popen, PIPE
 
 cmd = ["darcs", "whatsnew", "-l"]
 p = Popen(cmd, stdout=PIPE)
 output = p.communicate()[0]
-print output
+print(output)
 if output == "No changes!\n":
     sys.exit(0)
 sys.exit(1)
diff --git a/misc/build_helpers/test-git-ignore.py b/misc/build_helpers/test-git-ignore.py
index 832f775d94315067c5942f4e3d9a5c9508e6861c..5c7a2500a3be5d6eff954959f30ef2c68f613174 100644 (file)
@@ -1,12 +1,13 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 import sys
 from subprocess import Popen, PIPE
 
 cmd = ["git", "status", "--porcelain"]
 p = Popen(cmd, stdout=PIPE)
 output = p.communicate()[0]
-print output
+print(output)
 if output == "":
     sys.exit(0)
 sys.exit(1)
diff --git a/misc/coding_tools/check-interfaces.py b/misc/coding_tools/check-interfaces.py
index 6400bfe7de2949917fd007b64aa481eceb758b8b..9dafb7b0cd56a65752d6d157d1680096adcec000 100644 (file)
@@ -4,6 +4,7 @@
 #
 #   bin/tahoe @misc/coding_tools/check-interfaces.py
 
+from __future__ import print_function
 import os, sys, re, platform
 
 import zope.interface as zi
@@ -44,10 +45,10 @@ def strictly_implements(*interfaces):
                 for interface in interfaces:
                     try:
                         verifyClass(interface, cls)
-                    except Exception, e:
-                        print >>_err, ("%s.%s does not correctly implement %s.%s:\n%s"
-                                       % (cls.__module__, cls.__name__,
-                                          interface.__module__, interface.__name__, e))
+                    except Exception as e:
+                        print("%s.%s does not correctly implement %s.%s:\n%s"
+                              % (cls.__module__, cls.__name__,
+                                 interface.__module__, interface.__name__, e), file=_err)
         else:
             _other_modules_with_violations.add(cls.__module__)
         return cls
@@ -62,7 +63,7 @@ def check():
 
     if len(sys.argv) >= 2:
         if sys.argv[1] == '--help' or len(sys.argv) > 2:
-            print >>_err, "Usage: check-miscaptures.py [SOURCEDIR]"
+            print("Usage: check-miscaptures.py [SOURCEDIR]", file=_err)
             return
         srcdir = sys.argv[1]
     else:
@@ -79,26 +80,26 @@ def check():
         for fn in filenames:
             (basename, ext) = os.path.splitext(fn)
             if ext in ('.pyc', '.pyo') and not os.path.exists(os.path.join(dirpath, basename+'.py')):
-                print >>_err, ("Warning: no .py source file for %r.\n"
-                               % (os.path.join(dirpath, fn),))
+                print(("Warning: no .py source file for %r.\n"
+                               % (os.path.join(dirpath, fn),)), file=_err)
 
             if ext == '.py' and not excluded_file_basenames.match(basename):
                 relpath = os.path.join(dirpath[len(srcdir)+1:], basename)
                 module = relpath.replace(os.sep, '/').replace('/', '.')
                 try:
                     __import__(module)
-                except ImportError, e:
+                except ImportError as e:
                     if not is_windows and (' _win' in str(e) or 'win32' in str(e)):
-                        print >>_err, ("Warning: %r imports a Windows-specific module, so we cannot check it (%s).\n"
-                                       % (module, str(e)))
+                        print(("Warning: %r imports a Windows-specific module, so we cannot check it (%s).\n"
+                                       % (module, str(e))), file=_err)
                     else:
                         import traceback
                         traceback.print_exc(file=_err)
-                        print >>_err
+                        print(file=_err)
 
     others = list(_other_modules_with_violations)
     others.sort()
-    print >>_err, "There were also interface violations in:\n", ", ".join(others), "\n"
+    print("There were also interface violations in:\n", ", ".join(others), "\n", file=_err)
 
 
 # Forked from
@@ -184,7 +185,7 @@ def _verify(iface, candidate, tentative=0, vtype=None):
             # should never get here, since classes should not provide functions
             meth = fromFunction(attr, iface, name=name)
         elif (isinstance(attr, MethodTypes)
-              and type(attr.im_func) is FunctionType):
+              and type(attr.__func__) is FunctionType):
             meth = fromMethod(attr, iface, name)
         else:
             if not callable(attr):
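
The im_func -> __func__ rename above works because Python 2.6 already provides __func__ as an alias on method objects, and Python 3 keeps only that spelling. A quick illustration (C is a hypothetical class):

    class C(object):
        def m(self):
            pass

    # A bound method wraps the plain function stored in the class dict;
    # __func__ exposes it on Python 2.6+ and 3 (im_func is Python 2 only).
    assert C().m.__func__ is C.__dict__['m']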
diff --git a/misc/coding_tools/check-miscaptures.py b/misc/coding_tools/check-miscaptures.py
index 07b94a5bd563f278a0a42de8b43db42982a561eb..336d9757ca6b1dd51ea88f27dc0f7ebd32ef0f4c 100644 (file)
@@ -1,5 +1,6 @@
 #! /usr/bin/python
 
+from __future__ import print_function
 import os, sys, compiler
 from compiler.ast import Node, For, While, ListComp, AssName, Name, Lambda, Function
 
@@ -13,7 +14,7 @@ def check_file(path):
 def check_thing(parser, thing):
     try:
         ast = parser(thing)
-    except SyntaxError, e:
+    except SyntaxError as e:
         return e
     else:
         results = []
@@ -133,7 +134,7 @@ def make_result(funcnode, var_name, var_lineno):
 
 def report(out, path, results):
     for r in results:
-        print >>out, path + (":%r %s captures %r assigned at line %d" % r)
+        print(path + (":%r %s captures %r assigned at line %d" % r), file=out)
 
 def check(sources, out):
     class Counts:
@@ -146,7 +147,7 @@ def check(sources, out):
     def _process(path):
         results = check_file(path)
         if isinstance(results, SyntaxError):
-            print >>out, path + (" NOT ANALYSED due to syntax error: %s" % results)
+            print(path + (" NOT ANALYSED due to syntax error: %s" % results), file=out)
             counts.error_files += 1
         else:
             report(out, path, results)
@@ -156,7 +157,7 @@ def check(sources, out):
                 counts.suspect_files += 1
 
     for source in sources:
-        print >>out, "Checking %s..." % (source,)
+        print("Checking %s..." % (source,), file=out)
         if os.path.isfile(source):
             _process(source)
         else:
@@ -166,11 +167,11 @@ def check(sources, out):
                     if ext == '.py':
                         _process(os.path.join(dirpath, fn))
 
-    print >>out, ("%d suspiciously captured variables in %d out of %d file(s)."
-                  % (counts.n, counts.suspect_files, counts.processed_files))
+    print("%d suspiciously captured variables in %d out of %d file(s)."
+          % (counts.n, counts.suspect_files, counts.processed_files), file=out)
     if counts.error_files > 0:
-        print >>out, ("%d file(s) not processed due to syntax errors."
-                      % (counts.error_files,))
+        print("%d file(s) not processed due to syntax errors."
+              % (counts.error_files,), file=out)
     return counts.n
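
For context on what this tool flags: a lambda or nested def inside a loop captures the loop variable by reference, so every closure sees the variable's final value. A minimal demonstration of the miscapture pattern and the usual fix (illustrative, not from the commit):

    # Miscapture: all three closures share one binding of i.
    fns = [lambda: i for i in range(3)]
    assert [f() for f in fns] == [2, 2, 2]

    # Fix: bind the current value via a default argument.
    fns = [lambda i=i: i for i in range(3)]
    assert [f() for f in fns] == [0, 1, 2]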
 
 
diff --git a/misc/coding_tools/check-umids.py b/misc/coding_tools/check-umids.py
index 05e8825b9c7c05de9b28c9fd6897931db44142d0..87d92055994c967a40d053d703ab478b1d405713 100644 (file)
@@ -2,6 +2,7 @@
 
 # ./rumid.py foo.py
 
+from __future__ import print_function
 import sys, re, os
 
 ok = True
@@ -18,13 +19,13 @@ for fn in sys.argv[1:]:
             umid = mo.group(1)
             if umid in umids:
                 oldfn, oldlineno = umids[umid]
-                print "%s:%d: duplicate umid '%s'" % (fn, lineno, umid)
-                print "%s:%d: first used here" % (oldfn, oldlineno)
+                print("%s:%d: duplicate umid '%s'" % (fn, lineno, umid))
+                print("%s:%d: first used here" % (oldfn, oldlineno))
                 ok = False
             umids[umid] = (fn,lineno)
 
 if ok:
-    print "all umids are unique"
+    print("all umids are unique")
 else:
-    print "some umids were duplicates"
+    print("some umids were duplicates")
     sys.exit(1)
diff --git a/misc/coding_tools/find-trailing-spaces.py b/misc/coding_tools/find-trailing-spaces.py
index ad2cc5835f740c2c19da42d65c7e3b9bf760ff6a..87def1428574538adc74bb867b88d5d6bae067cb 100644 (file)
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 import os, sys
 
 from twisted.python import usage
@@ -22,7 +23,7 @@ def check(fn):
             line = line[:-1]
         if line.rstrip() != line:
             # the %s:%d:%d: lets emacs' compile-mode jump to those locations
-            print "%s:%d:%d: trailing whitespace" % (fn, i+1, len(line)+1)
+            print("%s:%d:%d: trailing whitespace" % (fn, i+1, len(line)+1))
             found[0] = True
     f.close()
 
diff --git a/misc/coding_tools/make-canary-files.py b/misc/coding_tools/make-canary-files.py
index 57f900a5c2cac64ef3bc643489448f69567b4bfa..684da93b3da1fd4cb6ec61feb34ada81bcc259c7 100644 (file)
@@ -49,6 +49,7 @@ system where Tahoe is installed, or in a source tree with setup.py like this:
 
  setup.py run_with_pythonpath -p -c 'misc/make-canary-files.py ARGS..'
 """
+from __future__ import print_function
 
 import os, sha
 from twisted.python import usage
@@ -86,8 +87,8 @@ for line in open(opts["nodeids"], "r").readlines():
     nodes[nodeid] = nickname
 
 if opts["k"] != 3 or opts["N"] != 10:
-    print "note: using non-default k/N requires patching the Tahoe code"
-    print "src/allmydata/client.py line 55, DEFAULT_ENCODING_PARAMETERS"
+    print("note: using non-default k/N requires patching the Tahoe code")
+    print("src/allmydata/client.py line 55, DEFAULT_ENCODING_PARAMETERS")
 
 convergence_file = os.path.expanduser(opts["convergence"])
 convergence_s = open(convergence_file, "rb").read().strip()
@@ -109,7 +110,7 @@ def find_share_for_target(target):
     while True:
         attempts += 1
         suffix = base32.b2a(os.urandom(10))
-        if verbose: print " trying", suffix,
+        if verbose: print(" trying", suffix, end=' ')
         data = prefix + suffix + "\n"
         assert len(data) > 55  # no LIT files
         # now, what storage index will this get?
@@ -117,11 +118,11 @@ def find_share_for_target(target):
         eu = upload.EncryptAnUploadable(u)
         d = eu.get_storage_index() # this happens to run synchronously
         def _got_si(si, data=data):
-            if verbose: print "SI", base32.b2a(si),
+            if verbose: print("SI", base32.b2a(si), end=' ')
             peerlist = get_permuted_peers(si)
             if peerlist[0] == target:
                 # great!
-                if verbose: print "  yay!"
+                if verbose: print("  yay!")
                 fn = base32.b2a(target)
                 if nodes[target]:
                     nickname = nodes[target].replace("/", "_")
@@ -131,7 +132,7 @@ def find_share_for_target(target):
                 open(fn, "w").write(data)
                 return True
             # nope, must try again
-            if verbose: print "  boo"
+            if verbose: print("  boo")
             return False
         d.addCallback(_got_si)
         # get sneaky and look inside the Deferred for the synchronous result
@@ -142,10 +143,10 @@ os.mkdir("canaries")
 attempts = []
 for target in nodes:
     target_s = base32.b2a(target)
-    print "working on", target_s
+    print("working on", target_s)
     attempts.append(find_share_for_target(target))
-print "done"
-print "%d attempts total, avg %d per target, max %d" % \
-      (sum(attempts), 1.0* sum(attempts) / len(nodes), max(attempts))
+print("done")
+print("%d attempts total, avg %d per target, max %d" % \
+      (sum(attempts), 1.0* sum(attempts) / len(nodes), max(attempts)))
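
For context, this script brute-forces share data whose storage index lands a chosen server first in Tahoe's permuted server list. A rough sketch of the permutation idea it relies on (a simplification; the hash layout and the get_permuted_peers signature here are assumptions, with the real logic living in the Tahoe client code of this era):

    import hashlib

    def get_permuted_peers(si, nodeids):
        # Per storage index, order servers by a hash binding the storage
        # index to each server id; byte strings are assumed throughout.
        return sorted(nodeids, key=lambda nid: hashlib.sha1(si + nid).digest())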
 
 
diff --git a/misc/operations_helpers/cpu-watcher-poll.py b/misc/operations_helpers/cpu-watcher-poll.py
index 68ac4b46a8e7e64ec111a76e3ab001ac55d9081c..0b4a60c53d91bc8a3a4f0cf348ac28c8ba659344 100644 (file)
@@ -1,13 +1,14 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 from foolscap import Tub, eventual
 from twisted.internet import reactor
 import sys
 import pprint
 
 def oops(f):
-    print "ERROR"
-    print f
+    print("ERROR")
+    print(f)
 
 def fetch(furl):
     t = Tub()
diff --git a/misc/operations_helpers/cpu-watcher-subscribe.py b/misc/operations_helpers/cpu-watcher-subscribe.py
index 4c560e2c6ce2668d7733b5cf0038112c930098da..514ce1836b632d5a8a70f9474a2c0077bb92fa1a 100644 (file)
@@ -1,5 +1,6 @@
 # -*- python -*-
 
+from __future__ import print_function
 from twisted.internet import reactor
 import sys
 
@@ -31,7 +32,7 @@ class CPUWatcherSubscriber(service.MultiService, Referenceable):
         tub.connectTo(furl, self.connected)
 
     def connected(self, rref):
-        print "subscribing"
+        print("subscribing")
         d = rref.callRemote("get_averages")
         d.addCallback(self.remote_averages)
         d.addErrback(log.err)
diff --git a/misc/operations_helpers/cpu-watcher.tac b/misc/operations_helpers/cpu-watcher.tac
index 12f2932481f846c29ee7d03b1cba6673cb7498f2..eacd20d153e82b7582b131b021b2053bb3e7b44e 100644 (file)
@@ -210,7 +210,7 @@ class CPUWatcher(service.MultiService, resource.Resource, Referenceable):
                 row.append(self._average_N(pid, avg))
             current.append(tuple(row))
         self.current = current
-        print current
+        print(current)
         for ob in self.observers:
             eventual.eventually(self.notify, ob)
 
diff --git a/misc/operations_helpers/find-share-anomalies.py b/misc/operations_helpers/find-share-anomalies.py
index 76fe3419f2d47a5923b1bdfdcd592a4180dee15c..5a114c71ed1cfd4ad22459398a3627e583ed04d7 100644 (file)
@@ -2,6 +2,7 @@
 
 # feed this the results of 'tahoe catalog-shares' for all servers
 
+from __future__ import print_function
 import sys
 
 chk_encodings = {}
@@ -44,23 +45,23 @@ sdmf_multiple_versions = [(si,lines)
 sdmf_multiple_versions.sort()
 
 if chk_multiple_encodings:
-    print
-    print "CHK multiple encodings:"
+    print()
+    print("CHK multiple encodings:")
     for (si,lines) in chk_multiple_encodings:
-        print " " + si
+        print(" " + si)
         for line in sorted(lines):
-            print "  " + line
+            print("  " + line)
 if sdmf_multiple_encodings:
-    print
-    print "SDMF multiple encodings:"
+    print()
+    print("SDMF multiple encodings:")
     for (si,lines) in sdmf_multiple_encodings:
-        print " " + si
+        print(" " + si)
         for line in sorted(lines):
-            print "  " + line
+            print("  " + line)
 if sdmf_multiple_versions:
-    print
-    print "SDMF multiple versions:"
+    print()
+    print("SDMF multiple versions:")
     for (si,lines) in sdmf_multiple_versions:
-        print " " + si
+        print(" " + si)
         for line in sorted(lines):
-            print "  " + line
+            print("  " + line)
diff --git a/misc/operations_helpers/getmem.py b/misc/operations_helpers/getmem.py
index 8ddc3ed7e91ca1fa998b30f957d894fbeb2fdfed..5903760dd84e5734da08fc77dd7aa3245b3f7fe3 100644 (file)
@@ -1,5 +1,6 @@
 #! /usr/bin/env python
 
+from __future__ import print_function
 from foolscap import Tub
 from foolscap.eventual import eventually
 import sys
@@ -10,7 +11,7 @@ def go():
     d = t.getReference(sys.argv[1])
     d.addCallback(lambda rref: rref.callRemote("get_memory_usage"))
     def _got(res):
-        print res
+        print(res)
         reactor.stop()
     d.addCallback(_got)
 
diff --git a/misc/operations_helpers/provisioning/provisioning.py b/misc/operations_helpers/provisioning/provisioning.py
index 37acd16d22f752bbb8aac358d993a0a280214025..8d041f2407db00202de927e10ad54223892cf158 100644 (file)
@@ -18,7 +18,7 @@ def factorial(n):
     factorial(n) with n<0 is -factorial(abs(n))
     """
     result = 1
-    for i in xrange(1, abs(n)+1):
+    for i in range(1, abs(n)+1):
         result *= i
     assert n >= 0
     return result
@@ -30,7 +30,7 @@ def binomial(n, k):
     # calculate n!/k! as one product, avoiding factors that
     # just get canceled
     P = k+1
-    for i in xrange(k+2, n+1):
+    for i in range(k+2, n+1):
         P *= i
     # if you are paranoid:
     # C, rem = divmod(P, factorial(n-k))
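
For context on the loops above: binomial(n, k) is computed as the single product (k+1)(k+2)...(n) = n!/k!, then divided by (n-k)!, avoiding the factors that would cancel anyway. A self-contained sketch of the same technique:

    def factorial(n):
        result = 1
        for i in range(1, n + 1):
            result *= i
        return result

    def binomial(n, k):
        # n!/k! as one product, skipping the factors that would cancel.
        P = 1
        for i in range(k + 1, n + 1):
            P *= i
        return P // factorial(n - k)

    assert binomial(10, 3) == 120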
diff --git a/misc/simulators/bench_spans.py b/misc/simulators/bench_spans.py
index e1e6e7ebd465153c91cc6121015e305d8d977df8..4a4bfd412156be4e638e96795238f580db93e4f6 100644 (file)
@@ -8,6 +8,7 @@ And run this command passing that trace file's name:
 python bench_spans.py run-112-above28-flog-dump-sh8-on-nsziz.txt
 """
 
+from __future__ import print_function
 from pyutil import benchutil
 
 from allmydata.util.spans import DataSpans
@@ -68,17 +69,17 @@ class B(object):
             elif INIT_S in inline:
                 pass
             else:
-                print "Warning, didn't recognize this line: %r" % (inline,)
+                print("Warning, didn't recognize this line: %r" % (inline,))
             count += 1
             inline = self.inf.readline()
 
         # print self.stats
 
 benchutil.print_bench_footer(UNITS_PER_SECOND=1000000)
-print "(microseconds)"
+print("(microseconds)")
 
 for N in [600, 6000, 60000]:
     b = B(open(sys.argv[1], 'rU'))
-    print "%7d" % N,
+    print("%7d" % N, end=' ')
     benchutil.rep_bench(b.run, N, b.init, UNITS_PER_SECOND=1000000)
 
diff --git a/misc/simulators/count_dirs.py b/misc/simulators/count_dirs.py
index 78412d33bd564784a0910c18748466acddd6c14c..39fc9bd3450be92e1ff903f034cec85bf61f704a 100644 (file)
@@ -34,6 +34,7 @@ MODES:
 
 """
 
+from __future__ import print_function
 import sys, os.path
 
 #URI:7jzbza6iwdsk5xbxsvdgjaugyrhetw64zpflp4gihmyh5krjblra====:a5qdejwbimu5b2wfke7xwexxlq======:gzeub5v42rjbgd7ccawnahu2evqd42lpdpzd447c6zkmdvjkpowq====:25:100:219889
@@ -96,10 +97,10 @@ def scan(root):
         for mode in MODES:
             total[mode] += slotsize(mode, len(files), len(dirs)) + stringsize
 
-    print "%d directories" % num_dirs
-    print "%d files" % num_files
+    print("%d directories" % num_dirs)
+    print("%d files" % num_files)
     for mode in sorted(total.keys()):
-        print "%s: %d bytes" % (mode, total[mode])
+        print("%s: %d bytes" % (mode, total[mode]))
 
 
 if __name__ == '__main__':
diff --git a/misc/simulators/hashbasedsig.py b/misc/simulators/hashbasedsig.py
index 640331aec7d4d8d39ccaca4446d322d145368db3..a7913d7b7a812ab0d4fc74e62d1e761b484fd35f 100644 (file)
@@ -19,6 +19,7 @@ cycles_per_byte = 15.8      # cost of hash
 Mcycles_per_block = cycles_per_byte * L_block / (8 * 1000000.0)
 
 
+from __future__ import print_function
 from math import floor, ceil, log, log1p, pow, e
 from sys import stderr
 from gc import collect
@@ -77,7 +78,7 @@ def make_candidate(B, K, K1, K2, q, T, T_min, L_hash, lg_N, sig_bytes, c_sign, c
 
 # Winternitz with B < 4 is never optimal. For example, going from B=4 to B=2 halves the
 # chain depth, but that is cancelled out by doubling (roughly) the number of digits.
-range_B = xrange(4, 33)
+range_B = range(4, 33)
 
 M = pow(2, lg_M)
 
@@ -98,7 +99,7 @@ def calculate(K, K1, K2, q_max, L_hash, trees):
     T_min = ceil_div(lg_M - lg_K1, lg_K)
 
     last_q = None
-    for T in xrange(T_min, T_min+21):
+    for T in range(T_min, T_min+21):
         # lg(total number of leaf private keys)
         lg_S = lg_K1 + lg_K*T
         lg_N = lg_S + lg_K2
@@ -135,17 +136,17 @@ def calculate(K, K1, K2, q_max, L_hash, trees):
 
         # We approximate lg(M-x) as lg(M)
         lg_px_step = lg_M + lg_p - lg_1_p
-        for x in xrange(1, j):
+        for x in range(1, j):
             lg_px[x] = lg_px[x-1] - lg(x) + lg_px_step
 
         q = None
         # Find the minimum acceptable value of q.
-        for q_cand in xrange(1, q_max+1):
+        for q_cand in range(1, q_max+1):
             lg_q = lg(q_cand)
-            lg_pforge = [lg_px[x] + (lg_q*x - lg_K2)*q_cand for x in xrange(1, j)]
+            lg_pforge = [lg_px[x] + (lg_q*x - lg_K2)*q_cand for x in range(1, j)]
             if max(lg_pforge) < -L_hash + lg(j) and lg_px[j-1] + 1.0 < -L_hash:
-                #print "K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f" \
-                #      % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3)
+                #print("K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f"
+                #      % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3))
                 q = q_cand
                 break
 
@@ -212,10 +213,10 @@ def calculate(K, K1, K2, q_max, L_hash, trees):
 
 def search():
     for L_hash in range_L_hash:
-        print >>stderr, "collecting...   \r",
+        print("collecting...   \r", end=' ', file=stderr)
         collect()
 
-        print >>stderr, "precomputing... \r",
+        print("precomputing... \r", end=' ', file=stderr)
 
         """
         # d/dq (lg(q+1) + L_hash/q) = 1/(ln(2)*(q+1)) - L_hash/q^2
@@ -244,13 +245,13 @@ def search():
         K_max = 50
         c2 = compressions(2*L_hash + L_label)
         c3 = compressions(3*L_hash + L_label)
-        for dau in xrange(0, 10):
+        for dau in range(0, 10):
             a = pow(2, dau)
-            for tri in xrange(0, ceil_log(30-dau, 3)):
+            for tri in range(0, ceil_log(30-dau, 3)):
                 x = int(a*pow(3, tri))
                 h = dau + 2*tri
                 c_x = int(sum_powers(2, dau)*c2 + a*sum_powers(3, tri)*c3)
-                for y in xrange(1, x+1):
+                for y in range(1, x+1):
                     if tri > 0:
                         # If the bottom level has arity 3, then for every 2 nodes by which the tree is
                         # imperfect, we can save c3 compressions by pruning 3 leaves back to their parent.
@@ -266,24 +267,24 @@ def search():
                         trees[y] = (h, c_y, (dau, tri))
 
         #for x in xrange(1, K_max+1):
-        #    print x, trees[x]
+        #    print("%r: %r" % (x, trees[x]))
 
         candidates = []
         progress = 0
         fuzz = 0
         complete = (K_max-1)*(2200-200)/100
-        for K in xrange(2, K_max+1):
-            for K2 in xrange(200, 2200, 100):
-                for K1 in xrange(max(2, K-fuzz), min(K_max, K+fuzz)+1):
+        for K in range(2, K_max+1):
+            for K2 in range(200, 2200, 100):
+                for K1 in range(max(2, K-fuzz), min(K_max, K+fuzz)+1):
                     candidates += calculate(K, K1, K2, q_max, L_hash, trees)
                 progress += 1
-                print >>stderr, "searching: %3d %% \r" % (100.0 * progress / complete,),
+                print("searching: %3d %% \r" % (100.0 * progress / complete,), end=' ', file=stderr)
 
-        print >>stderr, "filtering...    \r",
+        print("filtering...    \r", end=' ', file=stderr)
         step = 2.0
         bins = {}
         limit = floor_div(limit_cost, step)
-        for bin in xrange(0, limit+2):
+        for bin in range(0, limit+2):
             bins[bin] = []
 
         for c in candidates:
@@ -294,7 +295,7 @@ def search():
 
         # For each in a range of signing times, find the best candidate.
         best = []
-        for bin in xrange(0, limit):
+        for bin in range(0, limit):
             candidates = bins[bin] + bins[bin+1] + bins[bin+2]
             if len(candidates) > 0:
                 best += [min(candidates, key=lambda c: c['sig_bytes'])]
@@ -306,33 +307,33 @@ def search():
                     "%(c_ver)7d +/-%(c_ver_pm)5d (%(Mcycles_ver)5.2f +/-%(Mcycles_ver_pm)5.2f)   "
                    ) % candidate
 
-        print >>stderr, "                \r",
+        print("                \r", end=' ', file=stderr)
         if len(best) > 0:
-            print "  B    K   K1     K2    q    T  L_hash  lg_N  sig_bytes  c_sign (Mcycles)        c_ver     (    Mcycles   )"
-            print "---- ---- ---- ------ ---- ---- ------ ------ --------- ------------------ --------------------------------"
+            print("  B    K   K1     K2    q    T  L_hash  lg_N  sig_bytes  c_sign (Mcycles)        c_ver     (    Mcycles   )")
+            print("---- ---- ---- ------ ---- ---- ------ ------ --------- ------------------ --------------------------------")
 
             best.sort(key=lambda c: (c['sig_bytes'], c['cost']))
             last_sign = None
             last_ver = None
             for c in best:
                 if last_sign is None or c['c_sign'] < last_sign or c['c_ver'] < last_ver:
-                    print format_candidate(c)
+                    print(format_candidate(c))
                     last_sign = c['c_sign']
                     last_ver = c['c_ver']
 
-            print
+            print()
         else:
-            print "No candidates found for L_hash = %d or higher." % (L_hash)
+            print("No candidates found for L_hash = %d or higher." % (L_hash))
             return
 
         del bins
         del best
 
-print "Maximum signature size: %d bytes" % (limit_bytes,)
-print "Maximum (signing + %d*verification) cost: %.1f Mcycles" % (weight_ver, limit_cost)
-print "Hash parameters: %d-bit blocks with %d-bit padding and %d-bit labels, %.2f cycles per byte" \
-      % (L_block, L_pad, L_label, cycles_per_byte)
-print "PRF output size: %d bits" % (L_prf,)
-print "Security level given by L_hash is maintained for up to 2^%d signatures.\n" % (lg_M,)
+print("Maximum signature size: %d bytes" % (limit_bytes,))
+print("Maximum (signing + %d*verification) cost: %.1f Mcycles" % (weight_ver, limit_cost))
+print("Hash parameters: %d-bit blocks with %d-bit padding and %d-bit labels, %.2f cycles per byte" \
+      % (L_block, L_pad, L_label, cycles_per_byte))
+print("PRF output size: %d bits" % (L_prf,))
+print("Security level given by L_hash is maintained for up to 2^%d signatures.\n" % (lg_M,))
 
 search()
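
One caveat about the first hunk in this file: __future__ imports must precede every statement other than the module docstring, so inserting from __future__ import print_function after the module-level assignments shown there would be rejected by both Python 2 and 3 with a SyntaxError. The working shape is:

    # __future__ imports go at the very top, before any other statement.
    from __future__ import print_function

    cycles_per_byte = 15.8      # cost of hash
    # ... rest of the module ...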
diff --git a/misc/simulators/ringsim.py b/misc/simulators/ringsim.py
index 74c603a6666a5a2c86787cac1b4b54d4741b9cd6..1524988513a906a50757a4aca6a266ea65d1fcd5 100644 (file)
@@ -2,6 +2,7 @@
 
 # used to discuss ticket #302: "stop permuting peerlist?"
 
+from __future__ import print_function
 # import time
 import math
 from hashlib import md5  # sha1, sha256
@@ -47,7 +48,7 @@ def make_up_a_file_size(seed):
 
 sizes = [make_up_a_file_size(str(i)) for i in range(10000)]
 avg_filesize = sum(sizes)/len(sizes)
-print "average file size:", abbreviate_space(avg_filesize)
+print("average file size:", abbreviate_space(avg_filesize))
 
 SERVER_CAPACITY = 10**12
 
@@ -94,11 +95,11 @@ class Ring:
             prev_s = self.servers[(i-1)%len(self.servers)]
             diff = "%032x" % (int(s.nodeid,16) - int(prev_s.nodeid,16))
             s.prev_diff = diff
-            print s, s.prev_diff
+            print(s, s.prev_diff)
 
-        print "sorted by delta"
+        print("sorted by delta")
         for s in sorted(self.servers, key=lambda s:s.prev_diff):
-            print s, s.prev_diff
+            print(s, s.prev_diff)
 
     def servers_for_si(self, si):
         if self.permute:
@@ -121,7 +122,7 @@ class Ring:
         return "".join(bits)
 
     def dump_usage(self, numfiles, avg_space_per_file):
-        print "uploaded", numfiles
+        print("uploaded", numfiles)
         # avg_space_per_file measures expected grid-wide ciphertext per file
         used = list(reversed(sorted([s.used for s in self.servers])))
         # used is actual per-server ciphertext
@@ -137,19 +138,19 @@ class Ring:
         std_deviation = math.sqrt(variance)
         sd_of_total = std_deviation / avg_usage_per_file
 
-        print "min/max/(exp) usage-pf-ps %s/%s/(%s):" % (
+        print("min/max/(exp) usage-pf-ps %s/%s/(%s):" % (
             abbreviate_space(usedpf[-1]),
             abbreviate_space(usedpf[0]),
-            abbreviate_space(avg_usage_per_file) ),
-        print "spread-pf: %s (%.2f%%)" % (
-            abbreviate_space(spreadpf), 100.0*spreadpf/avg_usage_per_file),
+            abbreviate_space(avg_usage_per_file) ), end=' ')
+        print("spread-pf: %s (%.2f%%)" % (
+            abbreviate_space(spreadpf), 100.0*spreadpf/avg_usage_per_file), end=' ')
         #print "average_usage:", abbreviate_space(average_usagepf)
-        print "stddev: %s (%.2f%%)" % (abbreviate_space(std_deviation),
-                                       100.0*sd_of_total)
+        print("stddev: %s (%.2f%%)" % (abbreviate_space(std_deviation),
+                                       100.0*sd_of_total))
         if self.SHOW_MINMAX:
             s2 = sorted(self.servers, key=lambda s: s.used)
-            print "least:", s2[0].nodeid
-            print "most:", s2[-1].nodeid
+            print("least:", s2[0].nodeid)
+            print("most:", s2[-1].nodeid)
 
 
 class Options(usage.Options):
@@ -196,7 +197,7 @@ def do_run(ring, opts):
                 server_was_full = True
                 remaining_servers.discard(s)
                 if not remaining_servers:
-                    print "-- GRID IS FULL"
+                    print("-- GRID IS FULL")
                     ring.dump_usage(filenum, avg_space_per_file)
                     return filenum
                 index += 1
@@ -207,11 +208,11 @@ def do_run(ring, opts):
 
         if server_was_full and all_servers_have_room:
             all_servers_have_room = False
-            print "-- FIRST SERVER FULL"
+            print("-- FIRST SERVER FULL")
             ring.dump_usage(filenum, avg_space_per_file)
         if file_was_wrapped and no_files_have_wrapped:
             no_files_have_wrapped = False
-            print "-- FIRST FILE WRAPPED"
+            print("-- FIRST FILE WRAPPED")
             ring.dump_usage(filenum, avg_space_per_file)
 
 
@@ -219,11 +220,11 @@ def do_ring(opts):
     total_capacity = opts["servers"]*SERVER_CAPACITY
     avg_space_per_file = avg_filesize * opts["N"] / opts["k"]
     avg_files = total_capacity / avg_space_per_file
-    print "expected number of uploads:", avg_files
+    print("expected number of uploads:", avg_files)
     if opts["permute"]:
-        print " PERMUTED"
+        print(" PERMUTED")
     else:
-        print " LINEAR"
+        print(" LINEAR")
     seed = opts["seed"]
 
     ring = Ring(opts["servers"], seed, opts["permute"])
diff --git a/misc/simulators/simulate_load.py b/misc/simulators/simulate_load.py
index 50ff45b9cf4eeda944dfa66ac365e3a4e2c98f62..bae8ad39d752562aa9b05bca7c557eaea3209718 100644 (file)
@@ -2,6 +2,7 @@
 
 # WARNING. There is a bug in this script so that it does not simulate the actual Tahoe Two server selection algorithm that it was intended to simulate. See http://allmydata.org/trac/tahoe-lafs/ticket/302 (stop permuting peerlist, use SI as offset into ring instead?)
 
+from __future__ import print_function
 import random
 
 SERVER_CAPACITY = 10**12
@@ -83,7 +84,7 @@ def test(permutedpeerlist, iters):
     filledat = []
     for test in range(iters):
         (servers, doubled_up_shares) = go(permutedpeerlist)
-        print "doubled_up_shares: ", doubled_up_shares
+        print("doubled_up_shares: ", doubled_up_shares)
         for server in servers:
             fidx = server.full_at_tick
             filledat.extend([0]*(fidx-len(filledat)+1))
@@ -147,8 +148,8 @@ if __name__ == "__main__":
         if arg.startswith("--iters="):
             iters = int(arg[8:])
     if "--permute" in sys.argv:
-        print "doing permuted peerlist, iterations: %d" % iters
+        print("doing permuted peerlist, iterations: %d" % iters)
         test(True, iters)
     else:
-        print "doing simple ring, iterations: %d" % iters
+        print("doing simple ring, iterations: %d" % iters)
         test(False, iters)
diff --git a/misc/simulators/simulator.py b/misc/simulators/simulator.py
index 972c95827c05077288fe2e9603ebc878202ce36a..4d3e3abe9834849dd861bacd9f6e75e3f24e419a 100644 (file)
@@ -1,5 +1,6 @@
 #! /usr/bin/env python
 
+from __future__ import print_function
 import sha as shamodule
 import os, random
 
@@ -205,7 +206,7 @@ class Simulator:
         size = random.randrange(1000)
         n = random.choice(self.all_nodes)
         if self.verbose:
-            print "add_file(size=%d, from node %s)" % (size, n)
+            print("add_file(size=%d, from node %s)" % (size, n))
         fileid = randomid()
         able = n.publish_file(fileid, size)
         if able:
@@ -226,7 +227,7 @@ class Simulator:
             if n.delete_file():
                 self.deleted_files += 1
                 return
-        print "no files to delete"
+        print("no files to delete")
 
     def _add_event(self, etype):
         rate = getattr(self, "RATE_" + etype)
@@ -259,14 +260,14 @@ class Simulator:
         # self.print_stats(current_time, etype)
 
     def print_stats_header(self):
-        print "time:  added   failed   lost  avg_tried"
+        print("time:  added   failed   lost  avg_tried")
 
     def print_stats(self, time, etype):
         if not self.published_files:
             avg_tried = "NONE"
         else:
             avg_tried = sum(self.published_files) / len(self.published_files)
-        print time, etype, self.added_data, self.failed_files, self.lost_data_bytes, avg_tried, len(self.introducer.living_files), self.introducer.utilization
+        print(time, etype, self.added_data, self.failed_files, self.lost_data_bytes, avg_tried, len(self.introducer.living_files), self.introducer.utilization)
 
 global s
 s = None
@@ -282,7 +283,7 @@ def main():
     # s.print_stats_header()
     for i in range(1000):
         s.do_event()
-    print "%d files added, %d files deleted" % (s.added_files, s.deleted_files)
+    print("%d files added, %d files deleted" % (s.added_files, s.deleted_files))
     return s
 
 if __name__ == '__main__':
index b61eb152a2556cd9946b4fc6e3c2b10c731a532d..3cfbfafd021827a5fe6994f5593e0710435922ae 100644
@@ -1,5 +1,6 @@
 #! /usr/bin/env python
 
+from __future__ import print_function
 import random, math, re
 from twisted.python import usage
 
@@ -126,7 +127,7 @@ class Sizes:
                   "share_storage_overhead", "share_transmission_overhead",
                   "storage_overhead", "storage_overhead_percentage",
                   "bytes_until_some_data"):
-            print k, getattr(self, k)
+            print(k, getattr(self, k))
 
 def fmt(num, trim=False):
     if num < KiB:
@@ -160,11 +161,11 @@ def text():
     mode = opts["mode"]
     arity = opts["arity"]
     #      0123456789012345678901234567890123456789012345678901234567890123456
-    print "mode=%s" % mode, " arity=%d" % arity
-    print "                    storage    storage"
-    print "Size     sharesize  overhead   overhead     k  d  alacrity"
-    print "                    (bytes)      (%)"
-    print "-------  -------    --------   --------  ---- --  --------"
+    print("mode=%s" % mode, " arity=%d" % arity)
+    print("                    storage    storage")
+    print("Size     sharesize  overhead   overhead     k  d  alacrity")
+    print("                    (bytes)      (%)")
+    print("-------  -------    --------   --------  ---- --  --------")
     #sizes = [2 ** i for i in range(7, 41)]
     radix = math.sqrt(10); expstep = 2
     radix = 2; expstep = 2
@@ -181,7 +182,7 @@ def text():
         out += " %4d" % int(s.block_arity)
         out += " %2d" % int(s.block_tree_depth)
         out += " %8s" % fmt(s.bytes_until_some_data)
-        print out
+        print(out)
 
 
 def graph():
index a294b8d07cc6c3175d8718ef1ce2def680a9cb37..d9069d77d0905156090d539899b729912d30f767 100644
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
 import sys, math
 from allmydata import uri, storage
 from allmydata.immutable import upload
@@ -64,18 +65,18 @@ def calc(filesize, params=(3,7,10), segsize=DEFAULT_MAX_SEGMENT_SIZE):
 def main():
     filesize = int(sys.argv[1])
     urisize, sharesize, sharespace = calc(filesize)
-    print "urisize:", urisize
-    print "sharesize:  %10d" % sharesize
-    print "sharespace: %10d" % sharespace
-    print "desired expansion: %1.1f" % (1.0 * 10 / 3)
-    print "effective expansion: %1.1f" % (1.0 * sharespace / filesize)
+    print("urisize:", urisize)
+    print("sharesize:  %10d" % sharesize)
+    print("sharespace: %10d" % sharespace)
+    print("desired expansion: %1.1f" % (1.0 * 10 / 3))
+    print("effective expansion: %1.1f" % (1.0 * sharespace / filesize))
 
 def chart():
     filesize = 2
     while filesize < 2**20:
         urisize, sharesize, sharespace = calc(int(filesize))
         expansion = 1.0 * sharespace / int(filesize)
-        print "%d,%d,%d,%1.2f" % (int(filesize), urisize, sharespace, expansion)
+        print("%d,%d,%d,%1.2f" % (int(filesize), urisize, sharespace, expansion))
         filesize  = filesize * 2**0.5
 
 if __name__ == '__main__':
index fe52383b8ce01047c87ce8d84d6317aba03a7828..4257e686390f93597cba92900bb1d9ca95555acb 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,5 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
-import sys; assert sys.version_info < (3,), ur"Tahoe-LAFS does not run under Python 3. Please use a version of Python between 2.6 and 2.7.x inclusive."
 
 # Tahoe-LAFS -- secure, distributed storage grid
 #
@@ -10,7 +9,7 @@ import sys; assert sys.version_info < (3,), ur"Tahoe-LAFS does not run under Pyt
 #
 # See the docs/about.rst file for licensing information.
 
-import glob, os, stat, subprocess, re
+import glob, os, stat, subprocess, re, sys
 
 ##### sys.path management
 
@@ -61,7 +60,7 @@ else:
 # the _auto_deps.install_requires list, which is used in the call to setup()
 # below.
 adglobals = {}
-execfile('src/allmydata/_auto_deps.py', adglobals)
+exec(compile(open('src/allmydata/_auto_deps.py').read(), 'src/allmydata/_auto_deps.py', 'exec'), adglobals)
 install_requires = adglobals['install_requires']
 
 if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
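execfile() does not exist on Python 3, so setup.py switches to the exec(compile(...)) spelling, which runs on both major versions; the explicit compile() step keeps the real filename in tracebacks. A hedged sketch of the same idiom, assuming the repository layout (wrapping the open() in `with` is an extra tidy-up, not something the patch itself does):

    # Portable replacement for Python 2's execfile(path, globals_dict):
    adglobals = {}
    path = 'src/allmydata/_auto_deps.py'
    with open(path) as f:
        exec(compile(f.read(), path, 'exec'), adglobals)
    install_requires = adglobals['install_requires']

Note that `sys` joins the main import line because the deleted assertion was previously the only line importing it.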
index 4bafed474fb19e56e85efc44292cb937eb394295..8e2e120e50f0b29ce9e80007738a0f3a96cd9053 100644
@@ -146,15 +146,18 @@ from allmydata.util import verlib
 def normalized_version(verstr, what=None):
     try:
         return verlib.NormalizedVersion(verlib.suggest_normalized_version(verstr))
-    except (StandardError, verlib.IrrationalVersionError):
+    except (Exception, verlib.IrrationalVersionError):
         cls, value, trace = sys.exc_info()
-        raise PackagingError, ("could not parse %s due to %s: %s"
-                               % (what or repr(verstr), cls.__name__, value)), trace
+        msg = "could not parse %s due to %s: %s" % (what or repr(verstr), cls.__name__, value)
+        if sys.version_info[0] >= 3:
+            raise PackagingError(msg).with_traceback(trace)
+        else:
+            exec("raise c, v, t", {"c": PackagingError, "v": msg, "t": trace})
 
 
 def get_package_versions_and_locations():
     import warnings
-    from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
+    from ._auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
         user_warning_messages, runtime_warning_messages, warning_imports
 
     def package_dir(srcfile):
@@ -269,7 +272,7 @@ def cross_check_pkg_resources_versus_import():
     """This function returns a list of errors due to any failed cross-checks."""
 
     import pkg_resources
-    from _auto_deps import install_requires
+    from ._auto_deps import install_requires
 
     pkg_resources_vers_and_locs = dict([(p.project_name.lower(), (str(p.version), p.location))
                                         for p in pkg_resources.require(install_requires)])
@@ -311,7 +314,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
 
             try:
                 pr_normver = normalized_version(pr_ver)
-            except Exception, e:
+            except Exception as e:
                 errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
                               "The version found by import was %r from %r. "
                               "pkg_resources thought it should be found at %r. "
@@ -326,7 +329,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
                 else:
                     try:
                         imp_normver = normalized_version(imp_ver)
-                    except Exception, e:
+                    except Exception as e:
                         errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
                                       "pkg_resources thought it should be version %r at %r. "
                                       "The exception was %s: %s"
@@ -339,7 +342,7 @@ def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
                                               % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc))
 
     imported_packages = set([p.lower() for (p, _) in imported_vers_and_locs_list])
-    for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.iteritems():
+    for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.items():
         if pr_name not in imported_packages and pr_name not in ignorable:
             errors.append("Warning: dependency %r (version %r) found by pkg_resources not found by import."
                           % (pr_name, pr_ver))
@@ -373,7 +376,7 @@ def check_all_requirements():
     # (On Python 3, we'll have failed long before this point.)
     if sys.version_info < (2, 6):
         try:
-            version_string = ".".join(map(str, sys.version_info))
+            version_string = ".".join([str(v) for v in sys.version_info])
         except Exception:
             version_string = repr(sys.version_info)
         errors.append("Tahoe-LAFS currently requires Python v2.6 or greater (but less than v3), not %s"
@@ -383,7 +386,7 @@ def check_all_requirements():
     for requirement in install_requires:
         try:
             check_requirement(requirement, vers_and_locs)
-        except (ImportError, PackagingError), e:
+        except (ImportError, PackagingError) as e:
             errors.append("%s: %s" % (e.__class__.__name__, e))
 
     if errors:
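Two Python-3 removals meet in the version-checking hunks above: StandardError (its uses collapse into Exception) and the three-argument `raise cls, value, traceback` statement. The replacement re-raises through a version switch because the old raise form is a syntax error that a Python 3 parser must never even see, hence the exec() wrapper. A self-contained sketch of the pattern (PackagingError here is a stub standing in for the real class; six ships a ready-made version of this shim as six.reraise):

    import sys

    class PackagingError(Exception):
        pass

    def reraise_as_packaging_error(msg):
        # Convert the active exception while preserving its traceback.
        cls, value, trace = sys.exc_info()
        if sys.version_info[0] >= 3:
            raise PackagingError(msg).with_traceback(trace)
        else:
            # The 3-arg raise is Python-2-only syntax, so it hides inside
            # exec(), which only the Python 2 branch ever compiles.
            exec("raise c, v, t", {"c": PackagingError, "v": msg, "t": trace})

    try:
        int("not a number")
    except Exception:
        try:
            reraise_as_packaging_error("could not parse version")
        except PackagingError as e:
            print("converted: %s" % (e,))  # traceback still points at int()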
index b7be32128e6f4f44e85231df769ded65fc838d30..4e2cd133f4f81277bd355dd5a934933156b7dd44 100644
@@ -64,6 +64,8 @@ install_requires = [
     # pycryptopp-0.6.0 includes ed25519
     "pycryptopp >= 0.6.0",
 
+    "six >= 1.3.0",
+
     # Will be needed to test web apps, but not yet. See #1001.
     #"windmill >= 1.3",
 ]
@@ -85,6 +87,7 @@ package_imports = [
     ('pycrypto',        'Crypto'),
     ('pyasn1',          'pyasn1'),
     ('mock',            'mock'),
+    ('six',             'six'),
 ]
 
 def require_more():
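six, a small Python 2/3 compatibility layer, becomes a hard dependency here; hunks elsewhere in this commit route dict iteration (six.iteritems, six.itervalues) and iterator advancement (six.advance_iterator) through it. A quick, hypothetical sanity check that an environment satisfies the declared floor:

    import six
    print(six.__version__)
    # crude check that 'six >= 1.3.0' is met:
    assert tuple(int(x) for x in six.__version__.split(".")[:2]) >= (1, 3)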
index 9652c7025f1d7b2af4abb8413f56a41454ca9ceb..fd9bf33270fa4b2ab439cce70b568222bd40273a 100644
@@ -42,7 +42,7 @@ class Blacklist:
                     si = base32.a2b(si_s) # must be valid base32
                     self.entries[si] = reason
                 self.last_mtime = current_mtime
-        except Exception, e:
+        except Exception as e:
             twisted_log.err(e, "unparseable blacklist file")
             raise
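This two-line hunk shows one of the most common changes in the commit: the Python-2-only `except ExceptionClass, name:` comma syntax becomes `except ExceptionClass as name:`, which parses on 2.6+ and 3.x alike. Sketch:

    try:
        open("/nonexistent/blacklist")
    except IOError as e:  # 'except IOError, e:' is a SyntaxError on Python 3
        print("unparseable blacklist file: %s" % (e,))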
 
index 217a7b47448f145153095f87611c61483032480d..0b163a8f36e16e8ee52c13573f9e95068665ecab 100644
@@ -79,7 +79,8 @@ class KeyGenerator:
         keysize = keysize or self.default_keysize
         if self._remote:
             d = self._remote.callRemote('get_rsa_key_pair', keysize)
-            def make_key_objs((verifying_key, signing_key)):
+            def make_key_objs(xxx_todo_changeme):
+                (verifying_key, signing_key) = xxx_todo_changeme
                 v = rsa.create_verifying_key_from_string(verifying_key)
                 s = rsa.create_signing_key_from_string(signing_key)
                 return v, s
@@ -489,7 +490,7 @@ class Client(node.Node, pollmixin.PollMixin):
                 s = drop_upload.DropUploader(self, upload_dircap, local_dir_utf8)
                 s.setServiceParent(self)
                 s.startService()
-            except Exception, e:
+            except Exception as e:
                 self.log("couldn't start drop-uploader: %r", args=(e,))
 
     def _check_hotline(self, hotline_file):
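The `xxx_todo_changeme` names above come from 2to3's fix_tuple_params fixer: Python 3 (PEP 3113) removes tuple unpacking in function signatures, so `def f((a, b)):` becomes a one-argument function that unpacks in the body, and the fixer invents a placeholder name that a human is expected to rename later. This straw-man patch leaves the placeholders in. A cleaned-up version of the same callback shape (string stand-ins replace the real key objects):

    def make_key_objs(keypair):
        # 'def make_key_objs((verifying_key, signing_key)):' no longer parses.
        (verifying_key, signing_key) = keypair
        return verifying_key, signing_key

    v, s = make_key_objs(("verifying-key-bytes", "signing-key-bytes"))
    print("%s / %s" % (v, s))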
index 25e600227702c4268c291a368f26a54b0c79841e..07ac2d75542df749481321e9d63703a35fbbb9f3 100644
@@ -37,7 +37,7 @@ class CRSEncoder(object):
         precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares)
 
         if desired_share_ids is None:
-            desired_share_ids = range(self.max_shares)
+            desired_share_ids = list(range(self.max_shares))
 
         for inshare in inshares:
             assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares)
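On Python 3, range() returns a lazy immutable sequence rather than a list, so call sites that may go on to mutate or otherwise require a real list are wrapped in list(...); on Python 2 the wrap merely copies the list. Sketch:

    max_shares = 10
    desired_share_ids = list(range(max_shares))
    desired_share_ids.remove(3)   # a bare Python 3 range() has no .remove()
    print(desired_share_ids)      # [0, 1, 2, 4, 5, 6, 7, 8, 9]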
index 15ab382d6ecad26b4ca93564ec5b267efe32d6ee..bcde6f2f0eee620b8c67e1e8df7181bb738395d9 100644
@@ -10,6 +10,7 @@ from allmydata.util import fileutil, mathutil
 from allmydata.immutable import upload
 from allmydata.mutable.publish import MutableData
 from twisted.python import log
+import six
 
 def get_memory_usage():
     # this is obviously linux-specific
@@ -116,7 +117,7 @@ class ControlServer(Referenceable, service.Service):
         d.addCallback(self._do_one_ping, everyone_left, results)
         def _average(res):
             averaged = {}
-            for server_name,times in results.iteritems():
+            for server_name,times in six.iteritems(results):
                 averaged[server_name] = sum(times) / len(times)
             return averaged
         d.addCallback(_average)
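dict.iteritems() is gone on Python 3 (plain .items() is already lazy there), so the patch funnels these loops through six.iteritems(), which picks the memory-friendly spelling on each interpreter without building an intermediate list on either. Sketch with made-up ping results:

    import six

    results = {"server-a": [0.1, 0.3], "server-b": [0.2, 0.4]}
    averaged = {}
    for server_name, times in six.iteritems(results):
        averaged[server_name] = sum(times) / len(times)
    print(averaged)   # {'server-a': 0.2..., 'server-b': 0.3...}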
index 5fddec41a1a723084fbe2c82b216a4b2680028e5..86e2f7c8cd7812e44d4f28255d7ea9f278376731 100644
@@ -23,6 +23,7 @@ from allmydata.util.consumer import download_to_data
 from allmydata.uri import LiteralFileURI, from_string, wrap_dirnode_cap
 from pycryptopp.cipher.aes import AES
 from allmydata.util.dictutil import AuxValueDict
+import six
 
 
 def update_metadata(metadata, new_metadata, now):
@@ -147,7 +148,7 @@ class Adder:
     def modify(self, old_contents, servermap, first_time):
         children = self.node._unpack_contents(old_contents)
         now = time.time()
-        for (namex, (child, new_metadata)) in self.entries.iteritems():
+        for (namex, (child, new_metadata)) in six.iteritems(self.entries):
             name = normalize(namex)
             precondition(IFilesystemNode.providedBy(child), child)
 
@@ -189,7 +190,7 @@ def _encrypt_rw_uri(writekey, rw_uri):
 def pack_children(childrenx, writekey, deep_immutable=False):
     # initial_children must have metadata (i.e. {} instead of None)
     children = {}
-    for (namex, (node, metadata)) in childrenx.iteritems():
+    for (namex, (node, metadata)) in six.iteritems(childrenx):
         precondition(isinstance(metadata, dict),
                      "directory creation requires metadata to be a dict, not None", metadata)
         children[normalize(namex)] = (node, metadata)
@@ -366,7 +367,7 @@ class DirectoryNode:
                     log.msg(format="mutable cap for child %(name)s unpacked from an immutable directory",
                                    name=quote_output(name, encoding='utf-8'),
                                    facility="tahoe.webish", level=log.UNUSUAL)
-            except CapConstraintError, e:
+            except CapConstraintError as e:
                 log.msg(format="unmet constraint on cap for child %(name)s unpacked from a directory:\n"
                                "%(message)s", message=e.args[0], name=quote_output(name, encoding='utf-8'),
                                facility="tahoe.webish", level=log.UNUSUAL)
@@ -436,7 +437,7 @@ class DirectoryNode:
         exists a child of the given name, False if not."""
         name = normalize(namex)
         d = self._read()
-        d.addCallback(lambda children: children.has_key(name))
+        d.addCallback(lambda children: name in children)
         return d
 
     def _get(self, children, name):
@@ -496,7 +497,7 @@ class DirectoryNode:
         path-name elements.
         """
         d = self.get_child_and_metadata_at_path(pathx)
-        d.addCallback(lambda (node, metadata): node)
+        d.addCallback(lambda node_metadata: node_metadata[0])
         return d
 
     def get_child_and_metadata_at_path(self, pathx):
@@ -537,7 +538,7 @@ class DirectoryNode:
         # this takes URIs
         a = Adder(self, overwrite=overwrite,
                   create_readonly_node=self._create_readonly_node)
-        for (namex, e) in entries.iteritems():
+        for (namex, e) in six.iteritems(entries):
             assert isinstance(namex, unicode), namex
             if len(e) == 2:
                 writecap, readcap = e
@@ -669,7 +670,8 @@ class DirectoryNode:
             return defer.succeed("redundant rename/relink")
 
         d = self.get_child_and_metadata(current_child_name)
-        def _got_child( (child, metadata) ):
+        def _got_child(xxx_todo_changeme ):
+            (child, metadata) = xxx_todo_changeme
             return new_parent.set_node(new_child_name, child, metadata,
                                        overwrite=overwrite)
         d.addCallback(_got_child)
@@ -742,7 +744,7 @@ class DirectoryNode:
         # in the nodecache) seem to consume about 2000 bytes.
         dirkids = []
         filekids = []
-        for name, (child, metadata) in sorted(children.iteritems()):
+        for name, (child, metadata) in sorted(six.iteritems(children)):
             childpath = path + [name]
             if isinstance(child, UnknownNode):
                 walker.add_node(child, childpath)
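Besides the usual iteritems and except-as rewrites, the directory-node hunks show two more removals: dict.has_key(k) (use `k in d` instead) and tuple unpacking inside lambda parameters (index into the lone argument instead). Sketch with a toy children table:

    children = {"readme": ("node-1", {"mtime": 0})}

    # d.has_key(name) is removed on Python 3:
    print("readme" in children)                    # True

    # 'lambda (node, metadata): node' is a SyntaxError on Python 3:
    get_node = lambda node_metadata: node_metadata[0]
    print(get_node(children["readme"]))            # node-1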
index 82ef1c658a54a1485db94451566d4f793c65d124..e33bab2c912a904e3e2a90d88ed3eb1dcba425d3 100644
@@ -4,6 +4,7 @@ from twisted.web.client import getPage
 from twisted.internet import defer
 from twisted.cred import error, checkers, credentials
 from allmydata.util import base32
+import six
 
 class NeedRootcapLookupScheme(Exception):
     """Accountname+Password-based access schemes require some kind of
@@ -72,7 +73,7 @@ class AccountURLChecker:
                   "email": username,
                   "passwd": password,
                   }
-        for name, value in fields.iteritems():
+        for name, value in six.iteritems(fields):
             form.append('Content-Disposition: form-data; name="%s"' % name)
             form.append('')
             assert isinstance(value, str)
index 4ccb091919536f241c9e113b52898233aa52fbe8..826aac66f23e328e1da8e587b319c88f299c8d9a 100644
@@ -10,6 +10,7 @@ from allmydata.interfaces import IDirectoryNode, ExistingChildError, \
      NoSuchChildError
 from allmydata.immutable.upload import FileHandle
 from allmydata.util.fileutil import EncryptedTemporaryFile
+import six
 
 class ReadFile:
     implements(ftp.IReadFile)
@@ -68,8 +69,8 @@ class Handler:
 
     def makeDirectory(self, path):
         d = self._get_root(path)
-        d.addCallback(lambda (root,path):
-                      self._get_or_create_directories(root, path))
+        d.addCallback(lambda root_path:
+                      self._get_or_create_directories(root_path[0], root_path[1]))
         return d
 
     def _get_or_create_directories(self, node, path):
@@ -95,7 +96,8 @@ class Handler:
             raise NoParentError
         childname = path[-1]
         d = self._get_root(path)
-        def _got_root((root, path)):
+        def _got_root(xxx_todo_changeme):
+            (root, path) = xxx_todo_changeme
             if not path:
                 raise NoParentError
             return root.get_child_at_path(path[:-1])
@@ -111,7 +113,8 @@ class Handler:
             f.trap(NoParentError)
             raise ftp.PermissionDeniedError("cannot delete root directory")
         d.addErrback(_convert_error)
-        def _got_parent( (parent, childname) ):
+        def _got_parent(xxx_todo_changeme1 ):
+            (parent, childname) = xxx_todo_changeme1
             d = parent.get(childname)
             def _got_child(child):
                 if must_be_directory and not IDirectoryNode.providedBy(child):
@@ -134,11 +137,12 @@ class Handler:
     def rename(self, fromPath, toPath):
         # the target directory must already exist
         d = self._get_parent(fromPath)
-        def _got_from_parent( (fromparent, childname) ):
+        def _got_from_parent(xxx_todo_changeme2 ):
+            (fromparent, childname) = xxx_todo_changeme2
             d = self._get_parent(toPath)
-            d.addCallback(lambda (toparent, tochildname):
+            d.addCallback(lambda toparent_tochildname:
                           fromparent.move_child_to(childname,
-                                                   toparent, tochildname,
+                                                   toparent_tochildname[0], toparent_tochildname[1],
                                                    overwrite=False))
             return d
         d.addCallback(_got_from_parent)
@@ -177,7 +181,8 @@ class Handler:
 
     def _get_node_and_metadata_for_path(self, path):
         d = self._get_root(path)
-        def _got_root((root,path)):
+        def _got_root(xxx_todo_changeme3):
+            (root,path) = xxx_todo_changeme3
             if path:
                 return root.get_child_and_metadata_at_path(path)
             else:
@@ -185,7 +190,8 @@ class Handler:
         d.addCallback(_got_root)
         return d
 
-    def _populate_row(self, keys, (childnode, metadata)):
+    def _populate_row(self, keys, xxx_todo_changeme7):
+        (childnode, metadata) = xxx_todo_changeme7
         values = []
         isdir = bool(IDirectoryNode.providedBy(childnode))
         for key in keys:
@@ -197,7 +203,7 @@ class Handler:
             elif key == "directory":
                 value = isdir
             elif key == "permissions":
-                value = 0600
+                value = 0o600
             elif key == "hardlinks":
                 value = 1
             elif key == "modified":
@@ -218,7 +224,8 @@ class Handler:
     def stat(self, path, keys=()):
         # for files only, I think
         d = self._get_node_and_metadata_for_path(path)
-        def _render((node,metadata)):
+        def _render(xxx_todo_changeme4):
+            (node,metadata) = xxx_todo_changeme4
             assert not IDirectoryNode.providedBy(node)
             return self._populate_row(keys, (node,metadata))
         d.addCallback(_render)
@@ -229,14 +236,15 @@ class Handler:
         # the interface claims that path is a list of unicodes, but in
         # practice it is not
         d = self._get_node_and_metadata_for_path(path)
-        def _list((node, metadata)):
+        def _list(xxx_todo_changeme5):
+            (node, metadata) = xxx_todo_changeme5
             if IDirectoryNode.providedBy(node):
                 return node.list()
             return { path[-1]: (node, metadata) } # need last-edge metadata
         d.addCallback(_list)
         def _render(children):
             results = []
-            for (name, childnode) in children.iteritems():
+            for (name, childnode) in six.iteritems(children):
                 # the interface claims that the result should have a unicode
                 # object as the name, but it fails unless you give it a
                 # bytestring
@@ -249,7 +257,7 @@ class Handler:
 
     def openForReading(self, path):
         d = self._get_node_and_metadata_for_path(path)
-        d.addCallback(lambda (node,metadata): ReadFile(node))
+        d.addCallback(lambda node_metadata: ReadFile(node_metadata[0]))
         d.addErrback(self._convert_error)
         return d
 
@@ -259,7 +267,8 @@ class Handler:
             raise ftp.PermissionDeniedError("cannot STOR to root directory")
         childname = path[-1]
         d = self._get_root(path)
-        def _got_root((root, path)):
+        def _got_root(xxx_todo_changeme6):
+            (root, path) = xxx_todo_changeme6
             if not path:
                 raise ftp.PermissionDeniedError("cannot STOR to root directory")
             return root.get_child_at_path(path[:-1])
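The FTP-frontend hunks also convert octal literals: Python 3 only accepts the 0o prefix (a bare 0600 is a syntax error), and the 0o form already works on Python 2.6+; the same rewrite hits the 0777/0666/0555 permission masks in the SFTP frontend below. Sketch of that kind of permissions arithmetic:

    import stat

    perms = stat.S_IFREG | 0o666                    # was: 0666
    perms &= stat.S_IFDIR | stat.S_IFREG | 0o555    # clear the 'w' bits
    print(perms == (stat.S_IFREG | 0o444))          # True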
index f6e11cedf0022e64ba8f9c8120acac1e48782ddc..1ce0383b6d4c63c4306f2732b9800406a8af4618 100644
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import heapq, traceback, array, stat, struct
 from types import NoneType
@@ -35,6 +36,7 @@ from allmydata.mutable.publish import MutableFileHandle
 from allmydata.immutable.upload import FileHandle
 from allmydata.dirnode import update_metadata
 from allmydata.util.fileutil import EncryptedTemporaryFile
+import six
 
 noisy = True
 use_foolscap_logging = True
@@ -46,14 +48,14 @@ if use_foolscap_logging:
     (logmsg, logerr, PrefixingLogMixin) = (_msg, _err, _PrefixingLogMixin)
 else:  # pragma: no cover
     def logmsg(s, level=None):
-        print s
+        print(s)
     def logerr(s, level=None):
-        print s
+        print(s)
     class PrefixingLogMixin:
         def __init__(self, facility=None, prefix=''):
             self.prefix = prefix
         def log(self, s, level=None):
-            print "%r %s" % (self.prefix, s)
+            print("%r %s" % (self.prefix, s))
 
 
 def eventually_callback(d):
@@ -75,7 +77,7 @@ def _to_sftp_time(t):
     """SFTP times are unsigned 32-bit integers representing UTC seconds
     (ignoring leap seconds) since the Unix epoch, January 1 1970 00:00 UTC.
     A Tahoe time is the corresponding float."""
-    return long(t) & 0xFFFFFFFFL
+    return long(t) & 0xFFFFFFFF
 
 
 def _convert_error(res, request):
@@ -228,7 +230,7 @@ def _populate_attrs(childnode, metadata, size=None):
     if childnode and childnode.is_unknown():
         perms = 0
     elif childnode and IDirectoryNode.providedBy(childnode):
-        perms = S_IFDIR | 0777
+        perms = S_IFDIR | 0o777
     else:
         # For files, omit the size if we don't immediately know it.
         if childnode and size is None:
@@ -236,11 +238,11 @@ def _populate_attrs(childnode, metadata, size=None):
         if size is not None:
             _assert(isinstance(size, (int, long)) and not isinstance(size, bool), size=size)
             attrs['size'] = size
-        perms = S_IFREG | 0666
+        perms = S_IFREG | 0o666
 
     if metadata:
         if metadata.get('no-write', False):
-            perms &= S_IFDIR | S_IFREG | 0555  # clear 'w' bits
+            perms &= S_IFDIR | S_IFREG | 0o555  # clear 'w' bits
 
         # See webapi.txt for what these times mean.
         # We would prefer to omit atime, but SFTP version 3 can only
@@ -550,7 +552,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
             self.is_closed = True
             try:
                 self.f.close()
-            except Exception, e:
+            except Exception as e:
                 self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD)
         self.download_done("closed")
         return self.done_status
@@ -1023,7 +1025,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
     def logout(self):
         self.log(".logout()", level=OPERATIONAL)
 
-        for files in self._heisenfiles.itervalues():
+        for files in six.itervalues(self._heisenfiles):
             for f in files:
                 f.abandon()
 
@@ -1357,7 +1359,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
 
         d = delay or defer.succeed(None)
         d.addCallback(lambda ign: self._get_root(path))
-        def _got_root( (root, path) ):
+        def _got_root(xxx_todo_changeme2 ):
+            (root, path) = xxx_todo_changeme2
             if root.is_unknown():
                 raise SFTPError(FX_PERMISSION_DENIED,
                                 "cannot open an unknown cap (or child of an unknown object). "
@@ -1435,7 +1438,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                         if noisy: self.log("%r.get_child_and_metadata(%r)" % (parent, childname), level=NOISY)
                         d3.addCallback(lambda ign: parent.get_child_and_metadata(childname))
 
-                    def _got_child( (filenode, current_metadata) ):
+                    def _got_child(xxx_todo_changeme ):
+                        (filenode, current_metadata) = xxx_todo_changeme
                         if noisy: self.log("_got_child( (%r, %r) )" % (filenode, current_metadata), level=NOISY)
 
                         metadata = update_metadata(current_metadata, desired_metadata, time())
@@ -1496,7 +1500,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
         # the target directory must already exist
         d = deferredutil.gatherResults([self._get_parent_or_node(from_path),
                                         self._get_parent_or_node(to_path)])
-        def _got( (from_pair, to_pair) ):
+        def _got(xxx_todo_changeme3 ):
+            (from_pair, to_pair) = xxx_todo_changeme3
             if noisy: self.log("_got( (%r, %r) ) in .renameFile(%r, %r, overwrite=%r)" %
                                (from_pair, to_pair, from_pathstring, to_pathstring, overwrite), level=NOISY)
             (from_parent, from_childname) = from_pair
@@ -1567,8 +1572,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
             return defer.execute(_denied)
 
         d = self._get_root(path)
-        d.addCallback(lambda (root, path):
-                      self._get_or_create_directories(root, path, metadata))
+        d.addCallback(lambda root_path:
+                      self._get_or_create_directories(root_path[0], root_path[1], metadata))
         d.addBoth(_convert_error, request)
         return d
 
@@ -1610,7 +1615,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
     def _remove_object(self, path, must_be_directory=False, must_be_file=False):
         userpath = self._path_to_utf8(path)
         d = self._get_parent_or_node(path)
-        def _got_parent( (parent, childname) ):
+        def _got_parent(xxx_todo_changeme4 ):
+            (parent, childname) = xxx_todo_changeme4
             if childname is None:
                 raise SFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI")
 
@@ -1632,7 +1638,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
 
         path = self._path_from_string(pathstring)
         d = self._get_parent_or_node(path)
-        def _got_parent_or_node( (parent_or_node, childname) ):
+        def _got_parent_or_node(xxx_todo_changeme5 ):
+            (parent_or_node, childname) = xxx_todo_changeme5
             if noisy: self.log("_got_parent_or_node( (%r, %r) ) in openDirectory(%r)" %
                                (parent_or_node, childname, pathstring), level=NOISY)
             if childname is None:
@@ -1653,7 +1660,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
             def _render(children):
                 parent_readonly = dirnode.is_readonly()
                 results = []
-                for filename, (child, metadata) in children.iteritems():
+                for filename, (child, metadata) in six.iteritems(children):
                     # The file size may be cached or absent.
                     metadata['no-write'] = _no_write(parent_readonly, child, metadata)
                     attrs = _populate_attrs(child, metadata)
@@ -1679,7 +1686,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
         path = self._path_from_string(pathstring)
         userpath = self._path_to_utf8(path)
         d = self._get_parent_or_node(path)
-        def _got_parent_or_node( (parent_or_node, childname) ):
+        def _got_parent_or_node(xxx_todo_changeme6 ):
+            (parent_or_node, childname) = xxx_todo_changeme6
             if noisy: self.log("_got_parent_or_node( (%r, %r) )" % (parent_or_node, childname), level=NOISY)
 
             # Some clients will incorrectly try to get the attributes
@@ -1699,7 +1707,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
             else:
                 parent = parent_or_node
                 d2.addCallback(lambda ign: parent.get_child_and_metadata_at_path([childname]))
-                def _got( (child, metadata) ):
+                def _got(xxx_todo_changeme1 ):
+                    (child, metadata) = xxx_todo_changeme1
                     if noisy: self.log("_got( (%r, %r) )" % (child, metadata), level=NOISY)
                     _assert(IDirectoryNode.providedBy(parent), parent=parent)
                     metadata['no-write'] = _no_write(parent.is_readonly(), child, metadata)
@@ -1737,7 +1746,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
         path = self._path_from_string(pathstring)
         userpath = self._path_to_utf8(path)
         d = self._get_parent_or_node(path)
-        def _got_parent_or_node( (parent_or_node, childname) ):
+        def _got_parent_or_node(xxx_todo_changeme7 ):
+            (parent_or_node, childname) = xxx_todo_changeme7
             if noisy: self.log("_got_parent_or_node( (%r, %r) )" % (parent_or_node, childname), level=NOISY)
 
             direntry = _direntry_for(parent_or_node, childname)
@@ -1882,7 +1892,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
     def _get_parent_or_node(self, path):
         # return Deferred (parent, childname) or (node, None)
         d = self._get_root(path)
-        def _got_root( (root, remaining_path) ):
+        def _got_root(xxx_todo_changeme8 ):
+            (root, remaining_path) = xxx_todo_changeme8
             if not remaining_path:
                 return (root, None)
             else:
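Python 3 rejects the trailing-L long-literal suffix (0xFFFFFFFFL), so the mask in _to_sftp_time loses it; the value is the same either way, since Python 2 promotes to long automatically and Python 3 ints are unbounded. Note that the surviving long(t) call is itself Python-2-only and would need a later pass (six.integer_types, or plain int()). A sketch assuming int() is the eventual spelling:

    def to_sftp_time(t):
        # Mask a float timestamp to an unsigned 32-bit SFTP time.
        return int(t) & 0xFFFFFFFF       # no 'L' suffix needed on 2 or 3

    print(to_sftp_time(1377831983.25))   # 1377831983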
index 9ecd2f064c82be3149cf135fd5666d3960143ff8..5be413322a5760cf2d6f3872d9be79fc698ca8c7 100644
@@ -1,6 +1,7 @@
 # -*- test-case-name: allmydata.test.test_hashtree -*-
 
 from allmydata.util import mathutil # from the pyutil library
+import six
 
 """
 Read and write chunks from files.
@@ -215,7 +216,7 @@ class HashTree(CompleteBinaryTreeMixin, list):
         while len(rows[-1]) != 1:
             last = rows[-1]
             rows += [[pair_hash(last[2*i], last[2*i+1])
-                                for i in xrange(len(last)//2)]]
+                                for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -289,7 +290,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         rows = [L]
         while len(rows[-1]) != 1:
             last = rows[-1]
-            rows += [[None for i in xrange(len(last)//2)]]
+            rows += [[None for i in range(len(last)//2)]]
         # Flatten the list of rows into a single list.
         rows.reverse()
         self[:] = sum(rows, [])
@@ -377,7 +378,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         for h in leaves.values():
             assert isinstance(h, str)
         new_hashes = hashes.copy()
-        for leafnum,leafhash in leaves.iteritems():
+        for leafnum,leafhash in six.iteritems(leaves):
             hashnum = self.first_leaf_num + leafnum
             if hashnum in new_hashes:
                 if new_hashes[hashnum] != leafhash:
@@ -416,7 +417,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
 
             # first we provisionally add all hashes to the tree, comparing
             # any duplicates
-            for i,h in new_hashes.iteritems():
+            for i,h in six.iteritems(new_hashes):
                 if self[i]:
                     if self[i] != h:
                         raise BadHashError("new hash %s does not match "
@@ -430,7 +431,7 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
                     self[i] = h
                     remove_upon_failure.add(i)
 
-            for level in reversed(range(len(hashes_to_check))):
+            for level in reversed(list(range(len(hashes_to_check)))):
                 this_level = hashes_to_check[level]
                 while this_level:
                     i = this_level.pop()
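xrange() does not exist on Python 3, where range() is the lazy type; replacing xrange with range keeps 2.x working at the cost of materializing a list there. Where the result must behave like a list, 2to3 wraps conservatively, as in the reversed(list(range(...))) hunk above (reversed(range(n)) would in fact work on both, but the wrap is harmless). Sketch:

    last = ["h1", "h2", "h3", "h4"]
    pairs = [(last[2*i], last[2*i+1]) for i in range(len(last)//2)]
    print(pairs)                         # [('h1', 'h2'), ('h3', 'h4')]

    for level in reversed(list(range(3))):
        print(level)                     # 2, 1, 0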
index 41000f7e0d14df68ae0a5bf0faedefeb017867aa..0cb0c587df5dd034d646fb8cdc00629362540b0c 100644
@@ -116,7 +116,7 @@ class ValidatedExtendedURIProxy:
 
 
         # Next: things that are optional and not redundant: crypttext_hash
-        if d.has_key('crypttext_hash'):
+        if 'crypttext_hash' in d:
             self.crypttext_hash = d['crypttext_hash']
             if len(self.crypttext_hash) != CRYPTO_VAL_SIZE:
                 raise BadURIExtension('crypttext_hash is required to be hashutil.CRYPTO_VAL_SIZE bytes, not %s bytes' % (len(self.crypttext_hash),))
@@ -125,11 +125,11 @@ class ValidatedExtendedURIProxy:
         # Next: things that are optional, redundant, and required to be
         # consistent: codec_name, codec_params, tail_codec_params,
         # num_segments, size, needed_shares, total_shares
-        if d.has_key('codec_name'):
+        if 'codec_name' in d:
             if d['codec_name'] != "crs":
                 raise UnsupportedErasureCodec(d['codec_name'])
 
-        if d.has_key('codec_params'):
+        if 'codec_params' in d:
             ucpss, ucpns, ucpts = codec.parse_params(d['codec_params'])
             if ucpss != self.segment_size:
                 raise BadURIExtension("inconsistent erasure code params: "
@@ -144,7 +144,7 @@ class ValidatedExtendedURIProxy:
                                       "self._verifycap.total_shares: %s" %
                                       (ucpts, self._verifycap.total_shares))
 
-        if d.has_key('tail_codec_params'):
+        if 'tail_codec_params' in d:
             utcpss, utcpns, utcpts = codec.parse_params(d['tail_codec_params'])
             if utcpss != self.tail_segment_size:
                 raise BadURIExtension("inconsistent erasure code params: utcpss: %s != "
@@ -161,7 +161,7 @@ class ValidatedExtendedURIProxy:
                                       "self._verifycap.total_shares: %s" % (utcpts,
                                                                             self._verifycap.total_shares))
 
-        if d.has_key('num_segments'):
+        if 'num_segments' in d:
             if d['num_segments'] != self.num_segments:
                 raise BadURIExtension("inconsistent num_segments: size: %s, "
                                       "segment_size: %s, computed_num_segments: %s, "
@@ -169,18 +169,18 @@ class ValidatedExtendedURIProxy:
                                                                 self.segment_size,
                                                                 self.num_segments, d['num_segments']))
 
-        if d.has_key('size'):
+        if 'size' in d:
             if d['size'] != self._verifycap.size:
                 raise BadURIExtension("inconsistent size: URI size: %s, UEB size: %s" %
                                       (self._verifycap.size, d['size']))
 
-        if d.has_key('needed_shares'):
+        if 'needed_shares' in d:
             if d['needed_shares'] != self._verifycap.needed_shares:
                 raise BadURIExtension("inconsistent needed shares: URI needed shares: %s, UEB "
                                       "needed shares: %s" % (self._verifycap.total_shares,
                                                              d['needed_shares']))
 
-        if d.has_key('total_shares'):
+        if 'total_shares' in d:
             if d['total_shares'] != self._verifycap.total_shares:
                 raise BadURIExtension("inconsistent total shares: URI total shares: %s, UEB "
                                       "total shares: %s" % (self._verifycap.total_shares,
@@ -255,9 +255,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             sharehashes = dict(sh)
             try:
                 self.share_hash_tree.set_hashes(sharehashes)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_share_hashes)
         return d
@@ -288,9 +288,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
 
             try:
                 self.block_hash_tree.set_hashes(bh)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_block_hashes)
         return d
@@ -315,9 +315,9 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             ct_hashes = dict(enumerate(hashes))
             try:
                 crypttext_hash_tree.set_hashes(ct_hashes)
-            except IndexError, le:
+            except IndexError as le:
                 raise BadOrMissingHash(le)
-            except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                 raise BadOrMissingHash(le)
         d.addCallback(_got_crypttext_hashes)
         return d
@@ -358,7 +358,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
         sharehashes, blockhashes, blockdata = results
         try:
             sharehashes = dict(sharehashes)
-        except ValueError, le:
+        except ValueError as le:
             le.args = tuple(le.args + (sharehashes,))
             raise
         blockhashes = dict(enumerate(blockhashes))
@@ -372,7 +372,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
                 # match the root node of self.share_hash_tree.
                 try:
                     self.share_hash_tree.set_hashes(sharehashes)
-                except IndexError, le:
+                except IndexError as le:
                     # Weird -- sharehashes contained index numbers outside of
                     # the range that fit into this hash tree.
                     raise BadOrMissingHash(le)
@@ -399,7 +399,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
             #        (self.sharenum, blocknum, len(blockdata),
             #         blockdata[:50], blockdata[-50:], base32.b2a(blockhash)))
 
-        except (hashtree.BadHashError, hashtree.NotEnoughHashesError), le:
+        except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
             # log.WEIRD: indicates undetected disk/network error, or more
             # likely a programming error
             self.log("hash failure in block=%d, shnum=%d on %s" %
index ae76da876a5b5df3ecf533239d6ce387a9e8f973..2f79292a4fa9529d76cecebdd0f2d92a23d05974 100644
@@ -4,7 +4,7 @@ from foolscap.api import eventually
 from allmydata.interfaces import NotEnoughSharesError, NoSharesError
 from allmydata.util import log
 from allmydata.util.dictutil import DictOfSets
-from common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
+from .common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
      BadSegmentNumberError
 
 class SegmentFetcher:
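The one-line change to this module is the commit's import rewrite: Python 3 drops implicit relative imports (PEP 328), so intra-package imports like `from common import OVERDUE` gain an explicit leading dot, which Python 2.5+ also accepts. A runnable demonstration that builds a throwaway package on disk (all names here are hypothetical):

    import os, sys, tempfile

    root = tempfile.mkdtemp()
    pkg = os.path.join(root, "downloader")
    os.mkdir(pkg)
    open(os.path.join(pkg, "__init__.py"), "w").close()
    with open(os.path.join(pkg, "common.py"), "w") as f:
        f.write("OVERDUE = 'overdue'\n")
    with open(os.path.join(pkg, "fetcher.py"), "w") as f:
        # 'from common import OVERDUE' would break on Python 3:
        f.write("from .common import OVERDUE\n")

    sys.path.insert(0, root)
    from downloader.fetcher import OVERDUE
    print(OVERDUE)                       # overdue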
index 8bcdca76ff3522d0e564710ce81daf385d5a4ac7..8be450c65ecd4ebb9d15c4c564837ad5f3b9702c 100644
@@ -1,11 +1,12 @@
 
 import time
+import six
 now = time.time
 from foolscap.api import eventually
 from allmydata.util import base32, log
 from twisted.internet import reactor
 
-from share import Share, CommonShare
+from .share import Share, CommonShare
 
 def incidentally(res, f, *args, **kwargs):
     """Add me to a Deferred chain like this:
@@ -106,7 +107,7 @@ class ShareFinder:
         server = None
         try:
             if self._servers:
-                server = self._servers.next()
+                server = six.advance_iterator(self._servers)
         except StopIteration:
             self._servers = None
 
@@ -175,7 +176,7 @@ class ShareFinder:
                  shnums=shnums_s, name=server.get_name(),
                  level=log.NOISY, parent=lp, umid="0fcEZw")
         shares = []
-        for shnum, bucket in buckets.iteritems():
+        for shnum, bucket in six.iteritems(buckets):
             s = self._create_share(shnum, bucket, server, dyhb_rtt)
             shares.append(s)
         self._deliver_shares(shares)
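Iterator objects lose their .next() method on Python 3 (it becomes __next__), so `self._servers.next()` turns into six.advance_iterator(self._servers); the next() builtin (Python 2.6+) is the other portable spelling. Sketch:

    import six

    servers = iter(["server-a", "server-b"])
    print(six.advance_iterator(servers))   # server-a
    print(next(servers))                   # server-b; builtin next() also works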
index e852fd35e3d22a5f2015a3b7c57561b2f481024b..eb858da1eea45c76514a78e04ad5d03328370fe4 100644
@@ -1,5 +1,6 @@
 
 import time
+import six
 now = time.time
 from zope.interface import Interface
 from twisted.python.failure import Failure
@@ -13,10 +14,10 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
      NotEnoughHashesError
 
 # local imports
-from finder import ShareFinder
-from fetcher import SegmentFetcher
-from segmentation import Segmentation
-from common import BadCiphertextHashError
+from .finder import ShareFinder
+from .fetcher import SegmentFetcher
+from .segmentation import Segmentation
+from .common import BadCiphertextHashError
 
 class IDownloadStatusHandlingConsumer(Interface):
     def set_download_status_read_event(read_ev):
@@ -352,8 +353,8 @@ class DownloadNode:
 
         # each segment is turned into N blocks. All but the last are of size
         # block_size, and the last is of size tail_block_size
-        block_size = segment_size / k
-        tail_block_size = tail_segment_padded / k
+        block_size = segment_size // k
+        tail_block_size = tail_segment_padded // k
 
         return { "tail_segment_size": tail_segment_size,
                  "tail_segment_padded": tail_segment_padded,
@@ -454,7 +455,7 @@ class DownloadNode:
 
         shares = []
         shareids = []
-        for (shareid, share) in blocks.iteritems():
+        for (shareid, share) in six.iteritems(blocks):
             assert len(share) == block_size
             shareids.append(shareid)
             shares.append(share)
@@ -474,7 +475,8 @@ class DownloadNode:
         d.addCallback(_process)
         return d
 
-    def _check_ciphertext_hash(self, (segment, decodetime), segnum):
+    def _check_ciphertext_hash(self, xxx_todo_changeme, segnum):
+        (segment, decodetime) = xxx_todo_changeme
         start = now()
         assert self._active_segment.segnum == segnum
         assert self.segment_size is not None
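The block-size arithmetic above switches from / to //: on Python 3, / is true division even between ints, which would silently turn these byte counts into floats; // floors on both versions and matches old Python 2 behavior for the non-negative sizes involved. Sketch:

    segment_size, k = 131073, 3
    block_size = segment_size // k   # 43691 on Python 2 and 3
    print(block_size)
    print(segment_size / k)          # 43691 on Python 2, 43691.0 on Python 3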
index 07307c36705db0c353a445d1d775929d751b4135..f634ef0bca478ddd6e7552ec1622235121a31d4f 100644
@@ -9,7 +9,7 @@ from allmydata.util import log
 from allmydata.util.spans import overlap
 from allmydata.interfaces import DownloadStopped
 
-from common import BadSegmentNumberError, WrongSegmentError
+from .common import BadSegmentNumberError, WrongSegmentError
 
 class Segmentation:
     """I am responsible for a single offset+size read of the file. I handle
@@ -90,7 +90,8 @@ class Segmentation:
         self._cancel_segment_request = None
         return res
 
-    def _got_segment(self, (segment_start,segment,decodetime), wanted_segnum):
+    def _got_segment(self, xxx_todo_changeme, wanted_segnum):
+        (segment_start,segment,decodetime) = xxx_todo_changeme
         self._cancel_segment_request = None
         # we got file[segment_start:segment_start+len(segment)]
         # we want file[self._offset:self._offset+self._size]
index ae94af95eedd2a1e6dcfa3b4c9f19c27a2326c21..52781079910c81ee038fb87aac62a4714a9921c7 100644
@@ -13,7 +13,7 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
 
 from allmydata.immutable.layout import make_write_bucket_proxy
 from allmydata.util.observer import EventStreamObserver
-from common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
+from .common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
 
 
 class LayoutInvalid(Exception):
@@ -205,7 +205,7 @@ class Share:
                     level=log.NOISY, parent=self._lp, umid="BaL1zw")
             self._do_loop()
             # all exception cases call self._fail(), which clears self._alive
-        except (BadHashError, NotEnoughHashesError, LayoutInvalid), e:
+        except (BadHashError, NotEnoughHashesError, LayoutInvalid) as e:
             # Abandon this share. We do this if we see corruption in the
             # offset table, the UEB, or a hash tree. We don't abandon the
             # whole share if we see corruption in a data block (we abandon
@@ -222,7 +222,7 @@ class Share:
                     share=repr(self),
                     level=log.UNUSUAL, parent=self._lp, umid="gWspVw")
             self._fail(Failure(e), log.UNUSUAL)
-        except DataUnavailable, e:
+        except DataUnavailable as e:
             # Abandon this share.
             log.msg(format="need data that will never be available"
                     " from %s: pending=%s, received=%s, unavailable=%s" %
@@ -413,7 +413,7 @@ class Share:
         try:
             self._node.validate_and_store_UEB(UEB_s)
             return True
-        except (LayoutInvalid, BadHashError), e:
+        except (LayoutInvalid, BadHashError) as e:
             # TODO: if this UEB was bad, we'll keep trying to validate it
             # over and over again. Only log.err on the first one, or better
             # yet skip all but the first
@@ -449,7 +449,7 @@ class Share:
         try:
             self._node.process_share_hashes(share_hashes)
             # adds to self._node.share_hash_tree
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             self._signal_corruption(f, o["share_hashes"], hashlen)
             self.had_corruption = True
@@ -478,7 +478,7 @@ class Share:
         # cannot validate)
         try:
             self._commonshare.process_block_hashes(block_hashes)
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             hashnums = ",".join([str(n) for n in sorted(block_hashes.keys())])
             log.msg(format="hash failure in block_hashes=(%(hashnums)s),"
@@ -506,7 +506,7 @@ class Share:
         # gotten them all
         try:
             self._node.process_ciphertext_hashes(hashes)
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             f = Failure(e)
             hashnums = ",".join([str(n) for n in sorted(hashes.keys())])
             log.msg(format="hash failure in ciphertext_hashes=(%(hashnums)s),"
@@ -550,7 +550,7 @@ class Share:
             # now clear our received data, to dodge the #1170 spans.py
             # complexity bug
             self._received = DataSpans()
-        except (BadHashError, NotEnoughHashesError), e:
+        except (BadHashError, NotEnoughHashesError) as e:
             # rats, we have a corrupt block. Notify our clients that they
             # need to look elsewhere, and advise the server. Unlike
             # corruption in other parts of the share, this doesn't cause us
index e9174b6bc7fd5d68c7b6b5fefcea471221249589..d839a3ea5743ee615a8de8d49eaa637cc7e2ac37 100644
@@ -2,6 +2,7 @@
 import itertools
 from zope.interface import implements
 from allmydata.interfaces import IDownloadStatus
+import six
 
 class ReadEvent:
     def __init__(self, ev, ds):
@@ -73,7 +74,7 @@ class DownloadStatus:
     def __init__(self, storage_index, size):
         self.storage_index = storage_index
         self.size = size
-        self.counter = self.statusid_counter.next()
+        self.counter = six.advance_iterator(self.statusid_counter)
         self.helper = False
 
         self.first_timestamp = None
index cf308dd3ef861171546d5eb71f42bdc46729b891..e7a59cbb2d93afd366eb85d90ecbebc4e10da3ad 100644
@@ -12,6 +12,7 @@ from allmydata.util.assertutil import _assert, precondition
 from allmydata.codec import CRSEncoder
 from allmydata.interfaces import IEncoder, IStorageBucketWriter, \
      IEncryptedUploadable, IUploadStatus, UploadUnhappinessError
+import six
 
 
 """
@@ -198,7 +199,7 @@ class Encoder(object):
             assert IStorageBucketWriter.providedBy(landlords[k])
         self.landlords = landlords.copy()
         assert isinstance(servermap, dict)
-        for v in servermap.itervalues():
+        for v in six.itervalues(servermap):
             assert isinstance(v, set)
         self.servermap = servermap.copy()
 
@@ -417,12 +418,13 @@ class Encoder(object):
         d.addCallback(_got)
         return d
 
-    def _send_segment(self, (shares, shareids), segnum):
+    def _send_segment(self, xxx_todo_changeme, segnum):
         # To generate the URI, we must generate the roothash, so we must
         # generate all shares, even if we aren't actually giving them to
         # anybody. This means that the set of shares we create will be equal
         # to or larger than the set of landlords. If we have any landlord who
         # *doesn't* have a share, that's an error.
+        (shares, shareids) = xxx_todo_changeme
         _assert(set(self.landlords.keys()).issubset(set(shareids)),
                 shareids=shareids, landlords=self.landlords)
         start = time.time()
@@ -450,7 +452,7 @@ class Encoder(object):
                      (self,
                       self.segment_size*(segnum+1),
                       self.segment_size*self.num_segments,
-                      100 * (segnum+1) / self.num_segments,
+                      100 * (segnum+1) // self.num_segments,
                       ),
                      level=log.OPERATIONAL)
             elapsed = time.time() - start
index 6b54b2d03287c73ad08a660e28e851c26dc9d5f7..b3e92d5dfda8993f4cd0b4f568a41d7d76053c36 100644
@@ -1,6 +1,7 @@
 
 import binascii
 import time
+from functools import reduce
 now = time.time
 from zope.interface import implements
 from twisted.internet import defer
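The visible hunk for this file adds only an import: reduce() is no longer a builtin on Python 3, and `from functools import reduce` restores it identically on both versions (functools has exported it since Python 2.6). Sketch:

    from functools import reduce   # builtin on Py2; functools-only on Py3
    import operator

    share_masks = [0b001, 0b010, 0b100]
    print(reduce(operator.or_, share_masks))   # 7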
index c63240463cd1a510d411e8939d746fb3a5cd0b4f..3ef6980919ccf950399ba21a5eff7a554fc0ca8b 100644
@@ -26,6 +26,7 @@ from allmydata.immutable import layout
 from pycryptopp.cipher.aes import AES
 
 from cStringIO import StringIO
+import six
 
 
 # this wants to live in storage, not here
@@ -115,7 +116,7 @@ EXTENSION_SIZE = 1000
 # this.
 
 def pretty_print_shnum_to_servers(s):
-    return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.iteritems() ])
+    return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in six.iteritems(s) ])
 
 class ServerTracker:
     def __init__(self, server,
@@ -167,10 +168,11 @@ class ServerTracker:
         rref = self._server.get_rref()
         return rref.callRemote("get_buckets", self.storage_index)
 
-    def _got_reply(self, (alreadygot, buckets)):
+    def _got_reply(self, xxx_todo_changeme):
         #log.msg("%s._got_reply(%s)" % (self, (alreadygot, buckets)))
+        (alreadygot, buckets) = xxx_todo_changeme
         b = {}
-        for sharenum, rref in buckets.iteritems():
+        for sharenum, rref in six.iteritems(buckets):
             bp = self.wbp_class(rref, self._server, self.sharesize,
                                 self.blocksize,
                                 self.num_segments,
@@ -397,7 +399,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
                        % (self, self._get_progress_message(),
                           pretty_print_shnum_to_servers(merged),
                           [', '.join([str_shareloc(k,v)
-                                      for k,v in st.buckets.iteritems()])
+                                      for k,v in six.iteritems(st.buckets)])
                            for st in self.use_trackers],
                           pretty_print_shnum_to_servers(self.preexisting_shares))
                 self.log(msg, level=log.OPERATIONAL)
@@ -853,7 +855,7 @@ class UploadStatus:
         self.progress = [0.0, 0.0, 0.0]
         self.active = True
         self.results = None
-        self.counter = self.statusid_counter.next()
+        self.counter = six.advance_iterator(self.statusid_counter)
         self.started = time.time()
 
     def get_started(self):
@@ -985,7 +987,7 @@ class CHKUploader:
         d.addCallback(_done)
         return d
 
-    def set_shareholders(self, (upload_trackers, already_serverids), encoder):
+    def set_shareholders(self, xxx_todo_changeme1, encoder):
         """
         @param upload_trackers: a sequence of ServerTracker objects that
                                 have agreed to hold some shares for us (the
@@ -995,9 +997,10 @@ class CHKUploader:
                                   serverids for servers that claim to already
                                   have this share
         """
+        (upload_trackers, already_serverids) = xxx_todo_changeme1
         msgtempl = "set_shareholders; upload_trackers is %s, already_serverids is %s"
         values = ([', '.join([str_shareloc(k,v)
-                              for k,v in st.buckets.iteritems()])
+                              for k,v in six.iteritems(st.buckets)])
                    for st in upload_trackers], already_serverids)
         self.log(msgtempl % values, level=log.OPERATIONAL)
         # record already-present shares in self._results
@@ -1248,7 +1251,8 @@ class AssistedUploader:
         d.addCallback(self._contacted_helper)
         return d
 
-    def _contacted_helper(self, (helper_upload_results, upload_helper)):
+    def _contacted_helper(self, xxx_todo_changeme2):
+        (helper_upload_results, upload_helper) = xxx_todo_changeme2
         now = time.time()
         elapsed = now - self._time_contacting_helper_start
         self._elapsed_time_contacting_helper = elapsed
index 53d875ac942ff2bd9622b7d410bba6230cea357e..a1c1c0f2d37ac516aa515e672f5dd854d3a465e3 100644
--- a/src/allmydata/introducer/interfaces.py
+++ b/src/allmydata/introducer/interfaces.py
@@ -2,7 +2,7 @@
 from zope.interface import Interface
 from foolscap.api import StringConstraint, TupleOf, SetOf, DictOf, Any, \
     RemoteInterface, Referenceable
-from old import RIIntroducerSubscriberClient_v1
+from .old import RIIntroducerSubscriberClient_v1
 FURL = StringConstraint(1000)
 
 # old introducer protocol (v1):
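
`from old import ...` is an implicit relative import: Python 2 resolves `old` against the current package, but Python 3 treats it as absolute and fails. The explicit dotted form is accepted by Python 2.5+ and Python 3 alike. A comment-only sketch, assuming the package layout implied by this module:

    # allmydata/introducer/old.py defines RIIntroducerSubscriberClient_v1;
    # from inside allmydata/introducer/interfaces.py:
    #
    #     from old import RIIntroducerSubscriberClient_v1    # Python 2 only
    #     from .old import RIIntroducerSubscriberClient_v1   # Python 2.5+ and 3
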
index 89c8baffbcc095ff0024404eb70db977358a09da..d75defb8b4e6ac3dd3926975a2c0d927cb3dfad1 100644
--- a/src/allmydata/key_generator.py
+++ b/src/allmydata/key_generator.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os
 import time
@@ -107,4 +108,4 @@ class KeyGeneratorService(service.MultiService):
         kgf = os.path.join(self.basedir, self.furl_file)
         self.keygen_furl = self.tub.registerReference(self.key_generator, furlFile=kgf)
         if display_furl:
-            print 'key generator at:', self.keygen_furl
+            print('key generator at:', self.keygen_furl)
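
Each module that prints gains `from __future__ import print_function`, so the Python 3 function form of `print` also works under Python 2.6+. A sketch, with a made-up FURL value:

    from __future__ import print_function

    keygen_furl = "pb://example@tcp:127.0.0.1:1234/swissnum"   # hypothetical
    # The statement form `print 'key generator at:', keygen_furl` is a
    # SyntaxError once the __future__ import is in effect (and on Python 3).
    print('key generator at:', keygen_furl)
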
index 50bbdd34e06910c8166357aac2177446c80a229f..a1424d92e9e69cbbfe990e929e778b4b87f1ba92 100644
--- a/src/allmydata/mutable/filenode.py
+++ b/src/allmydata/mutable/filenode.py
@@ -22,6 +22,7 @@ from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 from allmydata.mutable.retrieve import Retrieve
 from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
 from allmydata.mutable.repairer import Repairer
+import six
 
 
 class BackoffAgent:
@@ -119,7 +120,7 @@ class MutableFileNode:
 
         return self
 
-    def create_with_keys(self, (pubkey, privkey), contents,
+    def create_with_keys(self, keypair, contents,
                          version=SDMF_VERSION):
         """Call this to create a brand-new mutable file. It will create the
         shares, find homes for them, and upload the initial contents (created
@@ -127,6 +128,7 @@ class MutableFileNode:
         Deferred that fires (with the MutableFileNode instance you should
         use) when it completes.
         """
+        (pubkey, privkey) = keypair
         self._pubkey, self._privkey = pubkey, privkey
         pubkey_s = self._pubkey.serialize()
         privkey_s = self._privkey.serialize()
@@ -338,7 +340,8 @@ class MutableFileNode:
         representing the best recoverable version of the file.
         """
         d = self._get_version_from_servermap(MODE_READ, servermap, version)
-        def _build_version((servermap, their_version)):
+        def _build_version(servermap_and_version):
+            (servermap, their_version) = servermap_and_version
             assert their_version in servermap.recoverable_versions()
             assert their_version in servermap.make_versionmap()
 
@@ -490,8 +493,9 @@ class MutableFileNode:
         # get_mutable_version => write intent, so we require that the
         # servermap is updated in MODE_WRITE
         d = self._get_version_from_servermap(MODE_WRITE, servermap, version)
-        def _build_version((servermap, smap_version)):
+        def _build_version(servermap_and_version):
             # these should have been set by the servermap update.
+            (servermap, smap_version) = servermap_and_version
             assert self._secret_holder
             assert self._writekey
 
@@ -1137,7 +1141,7 @@ class MutableFileVersion:
         start_segments = {} # shnum -> start segment
         end_segments = {} # shnum -> end segment
         blockhashes = {} # shnum -> blockhash tree
-        for (shnum, original_data) in update_data.iteritems():
+        for (shnum, original_data) in six.iteritems(update_data):
             data = [d[1] for d in original_data if d[0] == self._version]
             # data is [(blockhashes,start,end)..]
 
index b938794f947281428e5c8ba16fa57880df4faa08..4d1202939885cf7542533d2f35b734447a02e74a 100644
--- a/src/allmydata/mutable/layout.py
+++ b/src/allmydata/mutable/layout.py
@@ -8,6 +8,8 @@ from allmydata.util import mathutil
 from twisted.python import failure
 from twisted.internet import defer
 from zope.interface import implements
+import six
+from six.moves import filter
 
 
 # These strings describe the format of the packed structs they help process.
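
`filter()` returns a list on Python 2 but a lazy iterator on Python 3. `from six.moves import filter` pins the Python 3 behaviour on both versions, so code that needs an actual sequence must materialize it explicitly:

    from six.moves import filter

    evens = filter(lambda n: n % 2 == 0, range(10))   # iterator on Py2 and Py3
    assert list(evens) == [0, 2, 4, 6, 8]             # wrap in list() when indexing
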
@@ -255,7 +257,7 @@ class SDMFSlotWriteProxy:
                                                        self._required_shares)
         assert expected_segment_size == segment_size
 
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = self._segment_size // self._required_shares
 
         # This is meant to mimic how SDMF files were built before MDMF
         # entered the picture: we generate each share in its entirety,
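
Under `/`, two ints truncate on Python 2 but produce a float on Python 3; block sizes must stay integral, so these expressions switch to the explicit floor-division operator `//`, which behaves identically on both. A sketch with hypothetical sizes:

    segment_size = 131072        # 128 KiB, hypothetical
    required_shares = 3

    block_size = segment_size // required_shares      # 43690 on Py2 and Py3
    # segment_size / required_shares is 43690 on Py2 but 43690.666... on Py3,
    # and a float here would poison every later offset calculation.
    assert isinstance(block_size, int)
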
@@ -354,7 +356,7 @@ class SDMFSlotWriteProxy:
         Add the share hash chain to the share.
         """
         assert isinstance(sharehashes, dict)
-        for h in sharehashes.itervalues():
+        for h in six.itervalues(sharehashes):
             assert len(h) == HASH_SIZE
 
         # serialize the sharehashes, then set them.
@@ -787,7 +789,7 @@ class MDMFSlotWriteProxy:
         # and also because it provides a useful amount of bounds checking.
         self._num_segments = mathutil.div_ceil(self._data_length,
                                                self._segment_size)
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = self._segment_size // self._required_shares
         # We also calculate the share size, to help us with block
         # constraints later.
         tail_size = self._data_length % self._segment_size
@@ -796,7 +798,7 @@ class MDMFSlotWriteProxy:
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size /= self._required_shares
+            self._tail_block_size //= self._required_shares
 
         # We already know where the sharedata starts; right after the end
         # of the header (which is defined as the signable part + the offsets)
@@ -1315,7 +1317,7 @@ class MDMFSlotReadProxy:
         self._segment_size = segsize
         self._data_length = datalen
 
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = self._segment_size // self._required_shares
         # We can upload empty files, and need to account for this fact
         # so as to avoid zero-division and zero-modulo errors.
         if datalen > 0:
@@ -1327,7 +1329,7 @@ class MDMFSlotReadProxy:
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                     self._required_shares)
-            self._tail_block_size /= self._required_shares
+            self._tail_block_size //= self._required_shares
 
         return encoding_parameters
 
index 4acc2d6c3687444bafdd8736fa9a9452fe009b07..125ac7804c3df8ca20d6dfaa85030c2132e64a70 100644
--- a/src/allmydata/mutable/publish.py
+++ b/src/allmydata/mutable/publish.py
@@ -23,6 +23,7 @@ from allmydata.mutable.layout import get_version_from_checkstring,\
                                      unpack_sdmf_checkstring, \
                                      MDMFSlotWriteProxy, \
                                      SDMFSlotWriteProxy
+import six
 
 KiB = 1024
 DEFAULT_MAX_SEGMENT_SIZE = 128 * KiB
@@ -47,7 +48,7 @@ class PublishStatus:
         self.size = None
         self.status = "Not started"
         self.progress = 0.0
-        self.counter = self.statusid_counter.next()
+        self.counter = six.advance_iterator(self.statusid_counter)
         self.started = time.time()
 
     def add_per_server_time(self, server, elapsed):
@@ -306,7 +307,7 @@ class Publish:
         # Our update process fetched these for us. We need to update
         # them in place as publishing happens.
         self.blockhashes = {} # (shnum, [blochashes])
-        for (i, bht) in blockhashes.iteritems():
+        for (i, bht) in six.iteritems(blockhashes):
             # We need to extract the leaves from our old hash tree.
             old_segcount = mathutil.div_ceil(version[4],
                                              version[3])
@@ -314,7 +315,7 @@ class Publish:
             bht = dict(enumerate(bht))
             h.set_hashes(bht)
             leaves = h[h.get_leaf_index(0):]
-            for j in xrange(self.num_segments - len(leaves)):
+            for j in range(self.num_segments - len(leaves)):
                 leaves.append(None)
 
             assert len(leaves) >= self.num_segments
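
`xrange` does not exist on Python 3, where `range` is already lazy. Plain `range()` is correct on both versions, at the cost of allocating a real list on Python 2 (harmless for these small counts); `six.moves.range` would avoid even that. Sketch:

    num_segments = 4
    leaves = ["h0", "h1"]
    for j in range(num_segments - len(leaves)):   # xrange() -> NameError on Py3
        leaves.append(None)
    assert leaves == ["h0", "h1", None, None]
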
@@ -510,10 +511,10 @@ class Publish:
         # This will eventually hold the block hash chain for each share
         # that we publish. We define it this way so that empty publishes
         # will still have something to write to the remote slot.
-        self.blockhashes = dict([(i, []) for i in xrange(self.total_shares)])
-        for i in xrange(self.total_shares):
+        self.blockhashes = dict([(i, []) for i in range(self.total_shares)])
+        for i in range(self.total_shares):
             blocks = self.blockhashes[i]
-            for j in xrange(self.num_segments):
+            for j in range(self.num_segments):
                 blocks.append(None)
         self.sharehash_leaves = None # eventually [sharehashes]
         self.sharehashes = {} # shnum -> [sharehash leaves necessary to
@@ -685,7 +686,7 @@ class Publish:
         salt = os.urandom(16)
         assert self._version == SDMF_VERSION
 
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in six.iteritems(self.writers):
             for writer in writers:
                 writer.put_salt(salt)
 
@@ -752,7 +753,7 @@ class Publish:
         results, salt = encoded_and_salt
         shares, shareids = results
         self._status.set_status("Pushing segment")
-        for i in xrange(len(shares)):
+        for i in range(len(shares)):
             sharedata = shares[i]
             shareid = shareids[i]
             if self._version == MDMF_VERSION:
@@ -787,7 +788,7 @@ class Publish:
     def push_encprivkey(self):
         encprivkey = self._encprivkey
         self._status.set_status("Pushing encrypted private key")
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in six.iteritems(self.writers):
             for writer in writers:
                 writer.put_encprivkey(encprivkey)
 
@@ -795,7 +796,7 @@ class Publish:
     def push_blockhashes(self):
         self.sharehash_leaves = [None] * len(self.blockhashes)
         self._status.set_status("Building and pushing block hash tree")
-        for shnum, blockhashes in self.blockhashes.iteritems():
+        for shnum, blockhashes in six.iteritems(self.blockhashes):
             t = hashtree.HashTree(blockhashes)
             self.blockhashes[shnum] = list(t)
             # set the leaf for future use.
@@ -809,7 +810,7 @@ class Publish:
     def push_sharehashes(self):
         self._status.set_status("Building and pushing share hash chain")
         share_hash_tree = hashtree.HashTree(self.sharehash_leaves)
-        for shnum in xrange(len(self.sharehash_leaves)):
+        for shnum in range(len(self.sharehash_leaves)):
             needed_indices = share_hash_tree.needed_hashes(shnum)
             self.sharehashes[shnum] = dict( [ (i, share_hash_tree[i])
                                              for i in needed_indices] )
@@ -825,7 +826,7 @@ class Publish:
         #   - Get the checkstring of the resulting layout; sign that.
         #   - Push the signature
         self._status.set_status("Pushing root hashes and signature")
-        for shnum in xrange(self.total_shares):
+        for shnum in range(self.total_shares):
             writers = self.writers[shnum]
             for writer in writers:
                 writer.put_root_hash(self.root_hash)
@@ -853,7 +854,7 @@ class Publish:
         signable = self._get_some_writer().get_signable()
         self.signature = self._privkey.sign(signable)
 
-        for (shnum, writers) in self.writers.iteritems():
+        for (shnum, writers) in six.iteritems(self.writers):
             for writer in writers:
                 writer.put_signature(self.signature)
         self._status.timings['sign'] = time.time() - started
@@ -868,7 +869,7 @@ class Publish:
         ds = []
         verification_key = self._pubkey.serialize()
 
-        for (shnum, writers) in self.writers.copy().iteritems():
+        for (shnum, writers) in six.iteritems(self.writers.copy()):
             for writer in writers:
                 writer.put_verification_key(verification_key)
                 self.num_outstanding += 1
@@ -1007,7 +1008,7 @@ class Publish:
 
         # TODO: Precompute this.
         shares = []
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in six.iteritems(self.writers):
             shares.extend([x.shnum for x in writers if x.server == server])
         known_shnums = set(shares)
         surprise_shares -= known_shnums
index 2be92163ec822df2c972bee9005a5ee264aa6a1f..d46d751f716ffefae9a008892b418485d7618c5c 100644
--- a/src/allmydata/mutable/retrieve.py
+++ b/src/allmydata/mutable/retrieve.py
@@ -19,6 +19,7 @@ from pycryptopp.publickey import rsa
 from allmydata.mutable.common import CorruptShareError, BadShareError, \
      UncoordinatedWriteError
 from allmydata.mutable.layout import MDMFSlotReadProxy
+import six
 
 class RetrieveStatus:
     implements(IRetrieveStatus)
@@ -37,7 +38,7 @@ class RetrieveStatus:
         self.size = None
         self.status = "Not started"
         self.progress = 0.0
-        self.counter = self.statusid_counter.next()
+        self.counter = six.advance_iterator(self.statusid_counter)
         self.started = time.time()
 
     def get_started(self):
@@ -304,7 +305,7 @@ class Retrieve:
         self._active_readers = [] # list of active readers for this dl.
         self._block_hash_trees = {} # shnum => hashtree
 
-        for i in xrange(self._total_shares):
+        for i in range(self._total_shares):
             # So we don't have to do this later.
             self._block_hash_trees[i] = hashtree.IncompleteHashTree(self._num_segments)
 
@@ -747,7 +748,7 @@ class Retrieve:
             try:
                 bht.set_hashes(blockhashes)
             except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                    IndexError), e:
+                    IndexError) as e:
                 raise CorruptShareError(server,
                                         reader.shnum,
                                         "block hash tree failure: %s" % e)
@@ -761,7 +762,7 @@ class Retrieve:
         try:
            bht.set_hashes(leaves={segnum: blockhash})
         except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                IndexError), e:
+                IndexError) as e:
             raise CorruptShareError(server,
                                     reader.shnum,
                                     "block hash tree failure: %s" % e)
@@ -779,7 +780,7 @@ class Retrieve:
             self.share_hash_tree.set_hashes(hashes=sharehashes,
                                         leaves={reader.shnum: bht[0]})
         except (hashtree.BadHashError, hashtree.NotEnoughHashesError, \
-                IndexError), e:
+                IndexError) as e:
             raise CorruptShareError(server,
                                     reader.shnum,
                                     "corrupt hashes: %s" % e)
index 149e1a259e054adc9c068f36ef917ba221d32590..89b15fbe50b1e942f667afb148e1d3e00e7c97aa 100644
--- a/src/allmydata/mutable/servermap.py
+++ b/src/allmydata/mutable/servermap.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import sys, time, copy
 from zope.interface import implements
@@ -15,6 +16,7 @@ from pycryptopp.publickey import rsa
 from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, \
      MODE_READ, MODE_REPAIR, CorruptShareError
 from allmydata.mutable.layout import SIGNED_PREFIX_LENGTH, MDMFSlotReadProxy
+import six
 
 class UpdateStatus:
     implements(IServermapUpdaterStatus)
@@ -30,7 +32,7 @@ class UpdateStatus:
         self.mode = "?"
         self.status = "Not started"
         self.progress = 0.0
-        self.counter = self.statusid_counter.next()
+        self.counter = six.advance_iterator(self.statusid_counter)
         self.started = time.time()
         self.finished = None
 
@@ -182,19 +184,19 @@ class ServerMap:
         return (self._last_update_mode, self._last_update_time)
 
     def dump(self, out=sys.stdout):
-        print >>out, "servermap:"
+        print("servermap:", file=out)
 
         for ( (server, shnum), (verinfo, timestamp) ) in self._known_shares.items():
             (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
              offsets_tuple) = verinfo
-            print >>out, ("[%s]: sh#%d seq%d-%s %d-of-%d len%d" %
+            print(("[%s]: sh#%d seq%d-%s %d-of-%d len%d" %
                           (server.get_name(), shnum,
                            seqnum, base32.b2a(root_hash)[:4], k, N,
-                           datalength))
+                           datalength)), file=out)
         if self._problems:
-            print >>out, "%d PROBLEMS" % len(self._problems)
+            print("%d PROBLEMS" % len(self._problems), file=out)
             for f in self._problems:
-                print >>out, str(f)
+                print(str(f), file=out)
         return out
 
     def all_servers(self):
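
The `print >>out, ...` redirection syntax maps onto the `file=out` keyword argument of the print function. Together with the `__future__` import, this lets output target any file-like object on both versions. Sketch:

    from __future__ import print_function
    import sys

    def dump(problems, out=sys.stdout):
        print("servermap:", file=out)          # was: print >>out, "servermap:"
        if problems:
            print("%d PROBLEMS" % len(problems), file=out)

    dump(["failure-1", "failure-2"], out=sys.stderr)  # any writable stream works
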
index 8873e5c798232ab1d009de76e05b5436b3803d25..a60d0e3848256841d75d19f6f55d2c728b0fdfd1 100644
--- a/src/allmydata/node.py
+++ b/src/allmydata/node.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import datetime, os.path, re, types, ConfigParser, tempfile
 from base64 import b32decode, b32encode
 
@@ -12,10 +13,11 @@ from allmydata.util import fileutil, iputil, observer
 from allmydata.util.assertutil import precondition, _assert
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_filesystem_encoding, quote_output
+import six
 
 # Add our application versions to the data that Foolscap's LogPublisher
 # reports.
-for thing, things_version in get_package_versions().iteritems():
+for thing, things_version in six.iteritems(get_package_versions()):
     app_versions.add_version(thing, str(things_version))
 
 # group 1 will be addr (dotted quad string), group 3 if any will be portnum (string)
@@ -69,7 +71,7 @@ class Node(service.MultiService):
         self.basedir = abspath_expanduser_unicode(unicode(basedir))
         self._portnumfile = os.path.join(self.basedir, self.PORTNUMFILE)
         self._tub_ready_observerlist = observer.OneShotObserverList()
-        fileutil.make_dirs(os.path.join(self.basedir, "private"), 0700)
+        fileutil.make_dirs(os.path.join(self.basedir, "private"), 0o700)
         open(os.path.join(self.basedir, "private", "README"), "w").write(PRIV_README)
 
         # creates self.config
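
Python 3 drops the bare leading-zero octal literal, so `0700` becomes `0o700`; the `0o` prefix is accepted by Python 2.6+ and denotes exactly the same value. Sketch:

    import stat

    PRIVATE_DIR_MODE = 0o700                         # "0700" is a SyntaxError on Py3
    assert PRIVATE_DIR_MODE == stat.S_IRWXU == 448   # rwx------ for the owner
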
@@ -280,7 +282,7 @@ class Node(service.MultiService):
         fn = os.path.join(self.basedir, name)
         try:
             fileutil.write(fn, value, mode)
-        except EnvironmentError, e:
+        except EnvironmentError as e:
             self.log("Unable to write config file '%s'" % fn)
             self.log(e)
 
@@ -293,7 +295,7 @@ class Node(service.MultiService):
         # need to send a pid to the foolscap log here.
         twlog.msg("My pid: %s" % os.getpid())
         try:
-            os.chmod("twistd.pid", 0644)
+            os.chmod("twistd.pid", 0o644)
         except EnvironmentError:
             pass
         # Delay until the reactor is running.
@@ -317,12 +319,12 @@ class Node(service.MultiService):
     def _service_startup_failed(self, failure):
         self.log('_startService() failed')
         log.err(failure)
-        print "Node._startService failed, aborting"
-        print failure
+        print("Node._startService failed, aborting")
+        print(failure)
         #reactor.stop() # for unknown reasons, reactor.stop() isn't working.  [ ] TODO
         self.log('calling os.abort()')
         twlog.msg('calling os.abort()') # make sure it gets into twistd.log
-        print "calling os.abort()"
+        print("calling os.abort()")
         os.abort()
 
     def stopService(self):
@@ -347,7 +349,7 @@ class Node(service.MultiService):
         for o in twlog.theLogPublisher.observers:
             # o might be a FileLogObserver's .emit method
             if type(o) is type(self.setup_logging): # bound method
-                ob = o.im_self
+                ob = o.__self__
                 if isinstance(ob, twlog.FileLogObserver):
                     newmeth = types.UnboundMethodType(formatTimeTahoeStyle, ob, ob.__class__)
                     ob.formatTime = newmeth
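
Bound methods lose the `im_self`/`im_func` attribute names on Python 3; `__self__` and `__func__` exist from Python 2.6 onward and are the portable spellings (the neighbouring `types.UnboundMethodType` line is left as-is here and remains Python-2-only). Sketch:

    class Observer(object):
        def emit(self, event):
            return "%r got %r" % (self, event)

    ob = Observer()
    bound = ob.emit
    assert bound.__self__ is ob                         # was: bound.im_self
    assert bound.__func__ is Observer.__dict__["emit"]  # was: bound.im_func
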
index d0c002461e0383538f056a15b088015cc585719b..1879925d817cca32efa81433b39175e3f087be83 100644
--- a/src/allmydata/nodemaker.py
+++ b/src/allmydata/nodemaker.py
@@ -11,6 +11,7 @@ from allmydata.dirnode import DirectoryNode, pack_children
 from allmydata.unknown import UnknownNode
 from allmydata.blacklist import ProhibitedNode
 from allmydata import uri
+import six
 
 
 class NodeMaker:
@@ -126,7 +127,7 @@ class NodeMaker:
 
     def create_new_mutable_directory(self, initial_children={}, version=None):
         # initial_children must have metadata (i.e. {} instead of None)
-        for (name, (node, metadata)) in initial_children.iteritems():
+        for (name, (node, metadata)) in six.iteritems(initial_children):
             precondition(isinstance(metadata, dict),
                          "create_new_mutable_directory requires metadata to be a dict, not None", metadata)
             node.raise_error()
index 092d90a9c143c10c7dd03e971a477098deac3a1d..6c864e9082afba5f4d6cd03265b52f72d0385f20 100644
--- a/src/allmydata/scripts/admin.py
+++ b/src/allmydata/scripts/admin.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 from twisted.python import usage
 from allmydata.scripts.common import BaseOptions
@@ -18,8 +19,8 @@ def print_keypair(options):
     from allmydata.util.keyutil import make_keypair
     out = options.stdout
     privkey_vs, pubkey_vs = make_keypair()
-    print >>out, "private:", privkey_vs
-    print >>out, "public:", pubkey_vs
+    print("private:", privkey_vs, file=out)
+    print("public:", pubkey_vs, file=out)
 
 class DerivePubkeyOptions(BaseOptions):
     def parseArgs(self, privkey):
@@ -42,8 +43,8 @@ def derive_pubkey(options):
     from allmydata.util import keyutil
     privkey_vs = options.privkey
     sk, pubkey_vs = keyutil.parse_privkey(privkey_vs)
-    print >>out, "private:", privkey_vs
-    print >>out, "public:", pubkey_vs
+    print("private:", privkey_vs, file=out)
+    print("public:", pubkey_vs, file=out)
     return 0
 
 class AdminCommand(BaseOptions):
index 75ee0d9ce0c670c4cc5898b09ea98858a20b629c..faf0c4e110906c2188a3c95a5fc6f22426b38f27 100644
--- a/src/allmydata/scripts/backupdb.py
+++ b/src/allmydata/scripts/backupdb.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os.path, sys, time, random, stat
 
@@ -68,8 +69,8 @@ def get_backupdb(dbfile, stderr=sys.stderr,
     must_create = not os.path.exists(dbfile)
     try:
         db = sqlite3.connect(dbfile)
-    except (EnvironmentError, sqlite3.OperationalError), e:
-        print >>stderr, "Unable to create/open backupdb file %s: %s" % (dbfile, e)
+    except (EnvironmentError, sqlite3.OperationalError) as e:
+        print("Unable to create/open backupdb file %s: %s" % (dbfile, e), file=stderr)
         return None
 
     c = db.cursor()
@@ -82,10 +83,10 @@ def get_backupdb(dbfile, stderr=sys.stderr,
     try:
         c.execute("SELECT version FROM version")
         version = c.fetchone()[0]
-    except sqlite3.DatabaseError, e:
+    except sqlite3.DatabaseError as e:
         # this indicates that the file is not a compatible database format.
         # Perhaps it was created with an old version, or it might be junk.
-        print >>stderr, "backupdb file is unusable: %s" % e
+        print("backupdb file is unusable: %s" % e, file=stderr)
         return None
 
     if just_create: # for tests
@@ -97,7 +98,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         version = 2
     if version == 2:
         return BackupDB_v2(sqlite3, db)
-    print >>stderr, "Unable to handle backupdb version %s" % version
+    print("Unable to handle backupdb version %s" % version, file=stderr)
     return None
 
 class FileResult:
index b0e4e6dec70834496eab7739654ed29ad717b0ff..47d8170e1ef2f25516d522c8ffa8c4f144c29f64 100644
--- a/src/allmydata/scripts/cli.py
+++ b/src/allmydata/scripts/cli.py
@@ -1,8 +1,10 @@
+from __future__ import print_function
 import os.path, re, fnmatch
 from twisted.python import usage
 from allmydata.scripts.common import get_aliases, get_default_nodedir, \
      DEFAULT_ALIAS, BaseOptions
 from allmydata.util.encodingutil import argv_to_unicode, argv_to_abspath, quote_output
+from allmydata.util.sixutil import map
 
 NODEURL_RE=re.compile("http(s?)://([^:]*)(:([1-9][0-9]*))?")
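
`allmydata.util.sixutil` is not part of the `six` library itself; it reads like a small project-local shim providing a `map` with uniform behaviour across versions (Python 2's `map` returns a list, Python 3's an iterator). Its contents are not shown in this diff, so the following is only an assumed sketch of such a helper, built on `six.moves.map`:

    # Hypothetical sixutil-style helper -- NOT the actual
    # allmydata/util/sixutil.py source, which this diff does not show.
    from six.moves import map as _lazy_map

    def map(func, *sequences):
        """Eager map: always return a list, on Python 2 and Python 3."""
        return list(_lazy_map(func, *sequences))

    assert map(len, ["ab", "cde"]) == [2, 3]
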
 
@@ -537,8 +539,8 @@ def get(options):
             # enough to have picked an empty file
             pass
         else:
-            print >>options.stderr, "%s retrieved and written to %s" % \
-                  (options.from_file, options.to_file)
+            print("%s retrieved and written to %s" % \
+                  (options.from_file, options.to_file), file=options.stderr)
     return rc
 
 def put(options):
index bd78430cc9d8e10a57bb5ea9069762fa5fcdfc86..236f1f4c6b867c3e675ab749a6ec533a8725b684 100644
--- a/src/allmydata/scripts/common.py
+++ b/src/allmydata/scripts/common.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, sys, urllib
 import codecs
@@ -114,7 +115,7 @@ class TahoeError(Exception):
         self.msg = msg
 
     def display(self, err):
-        print >>err, self.msg
+        print(self.msg, file=err)
 
 
 class UnknownAliasError(TahoeError):
index 7b965525deced23e202d361180943818e6ea78a6..81cf6827fcfc994cb62795560871d0e22229b74c 100644
--- a/src/allmydata/scripts/common_http.py
+++ b/src/allmydata/scripts/common_http.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os
 from cStringIO import StringIO
@@ -68,7 +69,7 @@ def do_http(method, url, body=""):
 
     try:
         c.endheaders()
-    except socket_error, err:
+    except socket_error as err:
         return BadResponse(url, err)
 
     while True:
@@ -89,7 +90,7 @@ def format_http_error(msg, resp):
 
 def check_http_error(resp, stderr):
     if resp.status < 200 or resp.status >= 300:
-        print >>stderr, format_http_error("Error during HTTP request", resp)
+        print(format_http_error("Error during HTTP request", resp), file=stderr)
         return 1
 
 
index a27ed82719570fdf6785ea9aaf77a63e67e8f6b0..6f3b82da5b836ccd287ee7fa1ef359da752e5da7 100644
--- a/src/allmydata/scripts/create_node.py
+++ b/src/allmydata/scripts/create_node.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, sys
 from allmydata.scripts.common import BasedirOptions
@@ -104,9 +105,9 @@ def create_node(config, out=sys.stdout, err=sys.stderr):
 
     if os.path.exists(basedir):
         if listdir_unicode(basedir):
-            print >>err, "The base directory %s is not empty." % quote_output(basedir)
-            print >>err, "To avoid clobbering anything, I am going to quit now."
-            print >>err, "Please use a different directory, or empty this one."
+            print("The base directory %s is not empty." % quote_output(basedir), file=err)
+            print("To avoid clobbering anything, I am going to quit now.", file=err)
+            print("Please use a different directory, or empty this one.", file=err)
             return -1
         # we're willing to use an empty directory
     else:
@@ -159,13 +160,13 @@ def create_node(config, out=sys.stdout, err=sys.stderr):
     c.close()
 
     from allmydata.util import fileutil
-    fileutil.make_dirs(os.path.join(basedir, "private"), 0700)
-    print >>out, "Node created in %s" % quote_output(basedir)
+    fileutil.make_dirs(os.path.join(basedir, "private"), 0o700)
+    print("Node created in %s" % quote_output(basedir), file=out)
     if not config.get("introducer", ""):
-        print >>out, " Please set [client]introducer.furl= in tahoe.cfg!"
-        print >>out, " The node cannot connect to a grid without it."
+        print(" Please set [client]introducer.furl= in tahoe.cfg!", file=out)
+        print(" The node cannot connect to a grid without it.", file=out)
     if not config.get("nickname", ""):
-        print >>out, " Please set [node]nickname= in tahoe.cfg"
+        print(" Please set [node]nickname= in tahoe.cfg", file=out)
     return 0
 
 def create_client(config, out=sys.stdout, err=sys.stderr):
@@ -180,9 +181,9 @@ def create_introducer(config, out=sys.stdout, err=sys.stderr):
 
     if os.path.exists(basedir):
         if listdir_unicode(basedir):
-            print >>err, "The base directory %s is not empty." % quote_output(basedir)
-            print >>err, "To avoid clobbering anything, I am going to quit now."
-            print >>err, "Please use a different directory, or empty this one."
+            print("The base directory %s is not empty." % quote_output(basedir), file=err)
+            print("To avoid clobbering anything, I am going to quit now.", file=err)
+            print("Please use a different directory, or empty this one.", file=err)
             return -1
         # we're willing to use an empty directory
     else:
@@ -195,7 +196,7 @@ def create_introducer(config, out=sys.stdout, err=sys.stderr):
     write_node_config(c, config)
     c.close()
 
-    print >>out, "Introducer created in %s" % quote_output(basedir)
+    print("Introducer created in %s" % quote_output(basedir), file=out)
     return 0
 
 
index 34afbc1fbc5472c4bf634352d575599ddf6e38f0..cd222ddddceb72b7d911858509be370f03a3f0fc 100644
--- a/src/allmydata/scripts/debug.py
+++ b/src/allmydata/scripts/debug.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 # do not import any allmydata modules at this level. Do that from inside
 # individual functions instead.
@@ -7,6 +8,7 @@ from twisted.internet import defer
 from twisted.scripts import trial as twisted_trial
 from foolscap.logging import cli as foolscap_cli
 from allmydata.scripts.common import BaseOptions
+from allmydata.util.sixutil import map
 
 
 class DumpOptions(BaseOptions):
@@ -42,7 +44,7 @@ def dump_share(options):
     out = options.stdout
 
     # check the version, to see if we have a mutable or immutable share
-    print >>out, "share filename: %s" % quote_output(options['filename'])
+    print("share filename: %s" % quote_output(options['filename']), file=out)
 
     f = open(options['filename'], "rb")
     prefix = f.read(32)
@@ -60,7 +62,7 @@ def dump_immutable_share(options):
     if not options["leases-only"]:
         dump_immutable_chk_share(f, out, options)
     dump_immutable_lease_info(f, out)
-    print >>out
+    print(file=out)
     return 0
 
 def dump_immutable_chk_share(f, out, options):
@@ -72,7 +74,7 @@ def dump_immutable_chk_share(f, out, options):
     # use a ReadBucketProxy to parse the bucket and find the uri extension
     bp = ReadBucketProxy(None, None, '')
     offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
-    print >>out, "%20s: %d" % ("version", bp._version)
+    print("%20s: %d" % ("version", bp._version), file=out)
     seek = offsets['uri_extension']
     length = struct.unpack(bp._fieldstruct,
                            f.read_share_data(seek, bp._fieldsize))[0]
@@ -90,24 +92,24 @@ def dump_immutable_chk_share(f, out, options):
     for k in keys1:
         if k in unpacked:
             dk = display_keys.get(k, k)
-            print >>out, "%20s: %s" % (dk, unpacked[k])
-    print >>out
+            print("%20s: %s" % (dk, unpacked[k]), file=out)
+    print(file=out)
     for k in keys2:
         if k in unpacked:
             dk = display_keys.get(k, k)
-            print >>out, "%20s: %s" % (dk, unpacked[k])
-    print >>out
+            print("%20s: %s" % (dk, unpacked[k]), file=out)
+    print(file=out)
     for k in keys3:
         if k in unpacked:
             dk = display_keys.get(k, k)
-            print >>out, "%20s: %s" % (dk, unpacked[k])
+            print("%20s: %s" % (dk, unpacked[k]), file=out)
 
     leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
     if leftover:
-        print >>out
-        print >>out, "LEFTOVER:"
+        print(file=out)
+        print("LEFTOVER:", file=out)
         for k in sorted(leftover):
-            print >>out, "%20s: %s" % (k, unpacked[k])
+            print("%20s: %s" % (k, unpacked[k]), file=out)
 
     # the storage index isn't stored in the share itself, so we depend upon
     # knowing the parent directory name to get it
@@ -121,7 +123,7 @@ def dump_immutable_chk_share(f, out, options):
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"], unpacked["size"])
             verify_cap = u.to_string()
-            print >>out, "%20s: %s" % ("verify-cap", quote_output(verify_cap, quotemarks=False))
+            print("%20s: %s" % ("verify-cap", quote_output(verify_cap, quotemarks=False)), file=out)
 
     sizes = {}
     sizes['data'] = (offsets['plaintext_hash_tree'] -
@@ -129,33 +131,33 @@ def dump_immutable_chk_share(f, out, options):
     sizes['validation'] = (offsets['uri_extension'] -
                            offsets['plaintext_hash_tree'])
     sizes['uri-extension'] = len(UEB_data)
-    print >>out
-    print >>out, " Size of data within the share:"
+    print(file=out)
+    print(" Size of data within the share:", file=out)
     for k in sorted(sizes):
-        print >>out, "%20s: %s" % (k, sizes[k])
+        print("%20s: %s" % (k, sizes[k]), file=out)
 
     if options['offsets']:
-        print >>out
-        print >>out, " Section Offsets:"
-        print >>out, "%20s: %s" % ("share data", f._data_offset)
+        print(file=out)
+        print(" Section Offsets:", file=out)
+        print("%20s: %s" % ("share data", f._data_offset), file=out)
         for k in ["data", "plaintext_hash_tree", "crypttext_hash_tree",
                   "block_hashes", "share_hashes", "uri_extension"]:
             name = {"data": "block data"}.get(k,k)
             offset = f._data_offset + offsets[k]
-            print >>out, "  %20s: %s   (0x%x)" % (name, offset, offset)
-        print >>out, "%20s: %s" % ("leases", f._lease_offset)
+            print("  %20s: %s   (0x%x)" % (name, offset, offset), file=out)
+        print("%20s: %s" % ("leases", f._lease_offset), file=out)
 
 def dump_immutable_lease_info(f, out):
     # display lease information too
-    print >>out
+    print(file=out)
     leases = list(f.get_leases())
     if leases:
         for i,lease in enumerate(leases):
             when = format_expiration_time(lease.expiration_time)
-            print >>out, " Lease #%d: owner=%d, expire in %s" \
-                  % (i, lease.owner_num, when)
+            print(" Lease #%d: owner=%d, expire in %s" \
+                  % (i, lease.owner_num, when), file=out)
     else:
-        print >>out, " No leases."
+        print(" No leases.", file=out)
 
 def format_expiration_time(expiration_time):
     now = time.time()
@@ -191,27 +193,27 @@ def dump_mutable_share(options):
         share_type = "MDMF"
     f.close()
 
-    print >>out
-    print >>out, "Mutable slot found:"
-    print >>out, " share_type: %s" % share_type
-    print >>out, " write_enabler: %s" % base32.b2a(WE)
-    print >>out, " WE for nodeid: %s" % idlib.nodeid_b2a(nodeid)
-    print >>out, " num_extra_leases: %d" % num_extra_leases
-    print >>out, " container_size: %d" % container_size
-    print >>out, " data_length: %d" % data_length
+    print(file=out)
+    print("Mutable slot found:", file=out)
+    print(" share_type: %s" % share_type, file=out)
+    print(" write_enabler: %s" % base32.b2a(WE), file=out)
+    print(" WE for nodeid: %s" % idlib.nodeid_b2a(nodeid), file=out)
+    print(" num_extra_leases: %d" % num_extra_leases, file=out)
+    print(" container_size: %d" % container_size, file=out)
+    print(" data_length: %d" % data_length, file=out)
     if leases:
         for (leasenum, lease) in leases:
-            print >>out
-            print >>out, " Lease #%d:" % leasenum
-            print >>out, "  ownerid: %d" % lease.owner_num
+            print(file=out)
+            print(" Lease #%d:" % leasenum, file=out)
+            print("  ownerid: %d" % lease.owner_num, file=out)
             when = format_expiration_time(lease.expiration_time)
-            print >>out, "  expires in %s" % when
-            print >>out, "  renew_secret: %s" % base32.b2a(lease.renew_secret)
-            print >>out, "  cancel_secret: %s" % base32.b2a(lease.cancel_secret)
-            print >>out, "  secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid)
+            print("  expires in %s" % when, file=out)
+            print("  renew_secret: %s" % base32.b2a(lease.renew_secret), file=out)
+            print("  cancel_secret: %s" % base32.b2a(lease.cancel_secret), file=out)
+            print("  secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid), file=out)
     else:
-        print >>out, "No leases."
-    print >>out
+        print("No leases.", file=out)
+    print(file=out)
 
     if share_type == "SDMF":
         dump_SDMF_share(m, data_length, options)
@@ -238,7 +240,7 @@ def dump_SDMF_share(m, length, options):
 
     try:
         pieces = unpack_share(data)
-    except NeedMoreDataError, e:
+    except NeedMoreDataError as e:
         # retry once with the larger size
         size = e.needed_bytes
         f = open(options['filename'], "rb")
@@ -253,21 +255,21 @@ def dump_SDMF_share(m, length, options):
     (ig_version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
      ig_datalen, offsets) = unpack_header(data)
 
-    print >>out, " SDMF contents:"
-    print >>out, "  seqnum: %d" % seqnum
-    print >>out, "  root_hash: %s" % base32.b2a(root_hash)
-    print >>out, "  IV: %s" % base32.b2a(IV)
-    print >>out, "  required_shares: %d" % k
-    print >>out, "  total_shares: %d" % N
-    print >>out, "  segsize: %d" % segsize
-    print >>out, "  datalen: %d" % datalen
-    print >>out, "  enc_privkey: %d bytes" % len(enc_privkey)
-    print >>out, "  pubkey: %d bytes" % len(pubkey)
-    print >>out, "  signature: %d bytes" % len(signature)
+    print(" SDMF contents:", file=out)
+    print("  seqnum: %d" % seqnum, file=out)
+    print("  root_hash: %s" % base32.b2a(root_hash), file=out)
+    print("  IV: %s" % base32.b2a(IV), file=out)
+    print("  required_shares: %d" % k, file=out)
+    print("  total_shares: %d" % N, file=out)
+    print("  segsize: %d" % segsize, file=out)
+    print("  datalen: %d" % datalen, file=out)
+    print("  enc_privkey: %d bytes" % len(enc_privkey), file=out)
+    print("  pubkey: %d bytes" % len(pubkey), file=out)
+    print("  signature: %d bytes" % len(signature), file=out)
     share_hash_ids = ",".join(sorted([str(hid)
                                       for hid in share_hash_chain.keys()]))
-    print >>out, "  share_hash_chain: %s" % share_hash_ids
-    print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)
+    print("  share_hash_chain: %s" % share_hash_ids, file=out)
+    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)
 
     # the storage index isn't stored in the share itself, so we depend upon
     # knowing the parent directory name to get it
@@ -279,15 +281,15 @@ def dump_SDMF_share(m, length, options):
             fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
             u = SSKVerifierURI(storage_index, fingerprint)
             verify_cap = u.to_string()
-            print >>out, "  verify-cap:", quote_output(verify_cap, quotemarks=False)
+            print("  verify-cap:", quote_output(verify_cap, quotemarks=False), file=out)
 
     if options['offsets']:
         # NOTE: this offset-calculation code is fragile, and needs to be
         # merged with MutableShareFile's internals.
-        print >>out
-        print >>out, " Section Offsets:"
+        print(file=out)
+        print(" Section Offsets:", file=out)
         def printoffset(name, value, shift=0):
-            print >>out, "%s%20s: %s   (0x%x)" % (" "*shift, name, value, value)
+            print("%s%20s: %s   (0x%x)" % (" "*shift, name, value, value), file=out)
         printoffset("first lease", m.HEADER_SIZE)
         printoffset("share data", m.DATA_OFFSET)
         o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
@@ -305,7 +307,7 @@ def dump_SDMF_share(m, length, options):
         printoffset("extra leases", m._read_extra_lease_offset(f) + 4)
         f.close()
 
-    print >>out
+    print(file=out)
 
 def dump_MDMF_share(m, length, options):
     from allmydata.mutable.layout import MDMFSlotReadProxy
@@ -347,21 +349,21 @@ def dump_MDMF_share(m, length, options):
     (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
      offsets) = verinfo
 
-    print >>out, " MDMF contents:"
-    print >>out, "  seqnum: %d" % seqnum
-    print >>out, "  root_hash: %s" % base32.b2a(root_hash)
+    print(" MDMF contents:", file=out)
+    print("  seqnum: %d" % seqnum, file=out)
+    print("  root_hash: %s" % base32.b2a(root_hash), file=out)
     #print >>out, "  IV: %s" % base32.b2a(IV)
-    print >>out, "  required_shares: %d" % k
-    print >>out, "  total_shares: %d" % N
-    print >>out, "  segsize: %d" % segsize
-    print >>out, "  datalen: %d" % datalen
-    print >>out, "  enc_privkey: %d bytes" % len(encprivkey)
-    print >>out, "  pubkey: %d bytes" % len(pubkey)
-    print >>out, "  signature: %d bytes" % len(signature)
+    print("  required_shares: %d" % k, file=out)
+    print("  total_shares: %d" % N, file=out)
+    print("  segsize: %d" % segsize, file=out)
+    print("  datalen: %d" % datalen, file=out)
+    print("  enc_privkey: %d bytes" % len(encprivkey), file=out)
+    print("  pubkey: %d bytes" % len(pubkey), file=out)
+    print("  signature: %d bytes" % len(signature), file=out)
     share_hash_ids = ",".join([str(hid)
                                for hid in sorted(share_hash_chain.keys())])
-    print >>out, "  share_hash_chain: %s" % share_hash_ids
-    print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)
+    print("  share_hash_chain: %s" % share_hash_ids, file=out)
+    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)
 
     # the storage index isn't stored in the share itself, so we depend upon
     # knowing the parent directory name to get it
@@ -373,16 +375,16 @@ def dump_MDMF_share(m, length, options):
             fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
             u = MDMFVerifierURI(storage_index, fingerprint)
             verify_cap = u.to_string()
-            print >>out, "  verify-cap:", quote_output(verify_cap, quotemarks=False)
+            print("  verify-cap:", quote_output(verify_cap, quotemarks=False), file=out)
 
     if options['offsets']:
         # NOTE: this offset-calculation code is fragile, and needs to be
         # merged with MutableShareFile's internals.
 
-        print >>out
-        print >>out, " Section Offsets:"
+        print(file=out)
+        print(" Section Offsets:", file=out)
         def printoffset(name, value, shift=0):
-            print >>out, "%s%.20s: %s   (0x%x)" % (" "*shift, name, value, value)
+            print("%s%.20s: %s   (0x%x)" % (" "*shift, name, value, value), file=out)
         printoffset("first lease", m.HEADER_SIZE, 2)
         printoffset("share data", m.DATA_OFFSET, 2)
         o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
@@ -402,7 +404,7 @@ def dump_MDMF_share(m, length, options):
         printoffset("extra leases", m._read_extra_lease_offset(f) + 4, 2)
         f.close()
 
-    print >>out
+    print(file=out)
 
 
 
@@ -468,7 +470,7 @@ def dump_cap(options):
 
     u = uri.from_string(cap)
 
-    print >>out
+    print(file=out)
     dump_uri_instance(u, nodeid, secret, out)
 
 def _dump_secrets(storage_index, secret, nodeid, out):
@@ -477,19 +479,19 @@ def _dump_secrets(storage_index, secret, nodeid, out):
 
     if secret:
         crs = hashutil.my_renewal_secret_hash(secret)
-        print >>out, " client renewal secret:", base32.b2a(crs)
+        print(" client renewal secret:", base32.b2a(crs), file=out)
         frs = hashutil.file_renewal_secret_hash(crs, storage_index)
-        print >>out, " file renewal secret:", base32.b2a(frs)
+        print(" file renewal secret:", base32.b2a(frs), file=out)
         if nodeid:
             renew = hashutil.bucket_renewal_secret_hash(frs, nodeid)
-            print >>out, " lease renewal secret:", base32.b2a(renew)
+            print(" lease renewal secret:", base32.b2a(renew), file=out)
         ccs = hashutil.my_cancel_secret_hash(secret)
-        print >>out, " client cancel secret:", base32.b2a(ccs)
+        print(" client cancel secret:", base32.b2a(ccs), file=out)
         fcs = hashutil.file_cancel_secret_hash(ccs, storage_index)
-        print >>out, " file cancel secret:", base32.b2a(fcs)
+        print(" file cancel secret:", base32.b2a(fcs), file=out)
         if nodeid:
             cancel = hashutil.bucket_cancel_secret_hash(fcs, nodeid)
-            print >>out, " lease cancel secret:", base32.b2a(cancel)
+            print(" lease cancel secret:", base32.b2a(cancel), file=out)
 
 def dump_uri_instance(u, nodeid, secret, out, show_header=True):
     from allmydata import uri
@@ -499,114 +501,114 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
 
     if isinstance(u, uri.CHKFileURI):
         if show_header:
-            print >>out, "CHK File:"
-        print >>out, " key:", base32.b2a(u.key)
-        print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
-        print >>out, " size:", u.size
-        print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
+            print("CHK File:", file=out)
+        print(" key:", base32.b2a(u.key), file=out)
+        print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out)
+        print(" size:", u.size, file=out)
+        print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
         _dump_secrets(u.get_storage_index(), secret, nodeid, out)
     elif isinstance(u, uri.CHKFileVerifierURI):
         if show_header:
-            print >>out, "CHK Verifier URI:"
-        print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
-        print >>out, " size:", u.size
-        print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
+            print("CHK Verifier URI:", file=out)
+        print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out)
+        print(" size:", u.size, file=out)
+        print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
 
     elif isinstance(u, uri.LiteralFileURI):
         if show_header:
-            print >>out, "Literal File URI:"
-        print >>out, " data:", quote_output(u.data)
+            print("Literal File URI:", file=out)
+        print(" data:", quote_output(u.data), file=out)
 
     elif isinstance(u, uri.WriteableSSKFileURI): # SDMF
         if show_header:
-            print >>out, "SDMF Writeable URI:"
-        print >>out, " writekey:", base32.b2a(u.writekey)
-        print >>out, " readkey:", base32.b2a(u.readkey)
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
-        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
-        print >>out
+            print("SDMF Writeable URI:", file=out)
+        print(" writekey:", base32.b2a(u.writekey), file=out)
+        print(" readkey:", base32.b2a(u.readkey), file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
+        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
+        print(file=out)
         if nodeid:
             we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
-            print >>out, " write_enabler:", base32.b2a(we)
-            print >>out
+            print(" write_enabler:", base32.b2a(we), file=out)
+            print(file=out)
         _dump_secrets(u.get_storage_index(), secret, nodeid, out)
     elif isinstance(u, uri.ReadonlySSKFileURI):
         if show_header:
-            print >>out, "SDMF Read-only URI:"
-        print >>out, " readkey:", base32.b2a(u.readkey)
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
-        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+            print("SDMF Read-only URI:", file=out)
+        print(" readkey:", base32.b2a(u.readkey), file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
+        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
     elif isinstance(u, uri.SSKVerifierURI):
         if show_header:
-            print >>out, "SDMF Verifier URI:"
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
-        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+            print("SDMF Verifier URI:", file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
+        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
 
     elif isinstance(u, uri.WriteableMDMFFileURI): # MDMF
         if show_header:
-            print >>out, "MDMF Writeable URI:"
-        print >>out, " writekey:", base32.b2a(u.writekey)
-        print >>out, " readkey:", base32.b2a(u.readkey)
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
-        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
-        print >>out
+            print("MDMF Writeable URI:", file=out)
+        print(" writekey:", base32.b2a(u.writekey), file=out)
+        print(" readkey:", base32.b2a(u.readkey), file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
+        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
+        print(file=out)
         if nodeid:
             we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
-            print >>out, " write_enabler:", base32.b2a(we)
-            print >>out
+            print(" write_enabler:", base32.b2a(we), file=out)
+            print(file=out)
         _dump_secrets(u.get_storage_index(), secret, nodeid, out)
     elif isinstance(u, uri.ReadonlyMDMFFileURI):
         if show_header:
-            print >>out, "MDMF Read-only URI:"
-        print >>out, " readkey:", base32.b2a(u.readkey)
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
-        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+            print("MDMF Read-only URI:", file=out)
+        print(" readkey:", base32.b2a(u.readkey), file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
+        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
     elif isinstance(u, uri.MDMFVerifierURI):
         if show_header:
-            print >>out, "MDMF Verifier URI:"
-        print >>out, " storage index:", si_b2a(u.get_storage_index())
-        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
+            print("MDMF Verifier URI:", file=out)
+        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
+        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
 
 
     elif isinstance(u, uri.ImmutableDirectoryURI): # CHK-based directory
         if show_header:
-            print >>out, "CHK Directory URI:"
+            print("CHK Directory URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
     elif isinstance(u, uri.ImmutableDirectoryURIVerifier):
         if show_header:
-            print >>out, "CHK Directory Verifier URI:"
+            print("CHK Directory Verifier URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
 
     elif isinstance(u, uri.DirectoryURI): # SDMF-based directory
         if show_header:
-            print >>out, "Directory Writeable URI:"
+            print("Directory Writeable URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
     elif isinstance(u, uri.ReadonlyDirectoryURI):
         if show_header:
-            print >>out, "Directory Read-only URI:"
+            print("Directory Read-only URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
     elif isinstance(u, uri.DirectoryURIVerifier):
         if show_header:
-            print >>out, "Directory Verifier URI:"
+            print("Directory Verifier URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
 
     elif isinstance(u, uri.MDMFDirectoryURI): # MDMF-based directory
         if show_header:
-            print >>out, "Directory Writeable URI:"
+            print("Directory Writeable URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
     elif isinstance(u, uri.ReadonlyMDMFDirectoryURI):
         if show_header:
-            print >>out, "Directory Read-only URI:"
+            print("Directory Read-only URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
     elif isinstance(u, uri.MDMFDirectoryURIVerifier):
         if show_header:
-            print >>out, "Directory Verifier URI:"
+            print("Directory Verifier URI:", file=out)
         dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
 
     else:
-        print >>out, "unknown cap type"
+        print("unknown cap type", file=out)
 
 class FindSharesOptions(BaseOptions):
     def getSynopsis(self):
@@ -653,7 +655,7 @@ def find_shares(options):
         d = os.path.join(d, "storage/shares", sharedir)
         if os.path.exists(d):
             for shnum in listdir_unicode(d):
-                print >>out, os.path.join(d, shnum)
+                print(os.path.join(d, shnum), file=out)
 
     return 0
 
@@ -742,7 +744,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
 
             try:
                 pieces = unpack_share(data)
-            except NeedMoreDataError, e:
+            except NeedMoreDataError as e:
                 # retry once with the larger size
                 size = e.needed_bytes
                 f.seek(m.DATA_OFFSET)
@@ -752,10 +754,10 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
              pubkey, signature, share_hash_chain, block_hash_tree,
              share_data, enc_privkey) = pieces
 
-            print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
+            print("SDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, quote_output(abs_sharefile))
+                   expiration, quote_output(abs_sharefile)), file=out)
         elif share_type == "MDMF":
             from allmydata.mutable.layout import MDMFSlotReadProxy
             fake_shnum = 0
@@ -781,12 +783,12 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
             verinfo = extract(p.get_verinfo)
             (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
              offsets) = verinfo
-            print >>out, "MDMF %s %d/%d %d #%d:%s %d %s" % \
+            print("MDMF %s %d/%d %d #%d:%s %d %s" % \
                   (si_s, k, N, datalen,
                    seqnum, base32.b2a(root_hash),
-                   expiration, quote_output(abs_sharefile))
+                   expiration, quote_output(abs_sharefile)), file=out)
         else:
-            print >>out, "UNKNOWN mutable %s" % quote_output(abs_sharefile)
+            print("UNKNOWN mutable %s" % quote_output(abs_sharefile), file=out)
 
     elif struct.unpack(">L", prefix[:4]) == (1,):
         # immutable
@@ -816,12 +818,12 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
         filesize = unpacked["size"]
         ueb_hash = unpacked["UEB_hash"]
 
-        print >>out, "CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
+        print("CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
                                                    ueb_hash, expiration,
-                                                   quote_output(abs_sharefile))
+                                                   quote_output(abs_sharefile)), file=out)
 
     else:
-        print >>out, "UNKNOWN really-unknown %s" % quote_output(abs_sharefile)
+        print("UNKNOWN really-unknown %s" % quote_output(abs_sharefile), file=out)
 
     f.close()
 
@@ -852,7 +854,7 @@ def catalog_shares(options):
                         si_dir = os.path.join(abbrevdir, si_s)
                         catalog_shares_one_abbrevdir(si_s, si_dir, now, out,err)
                 except:
-                    print >>err, "Error processing %s" % quote_output(abbrevdir)
+                    print("Error processing %s" % quote_output(abbrevdir), file=err)
                     failure.Failure().printTraceback(err)
 
     return 0
@@ -874,10 +876,10 @@ def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
                 describe_share(abs_sharefile, si_s, shnum_s, now,
                                out)
             except:
-                print >>err, "Error processing %s" % quote_output(abs_sharefile)
+                print("Error processing %s" % quote_output(abs_sharefile), file=err)
                 failure.Failure().printTraceback(err)
     except:
-        print >>err, "Error processing %s" % quote_output(si_dir)
+        print("Error processing %s" % quote_output(si_dir), file=err)
         failure.Failure().printTraceback(err)
 
 class CorruptShareOptions(BaseOptions):
@@ -920,7 +922,7 @@ def corrupt_share(options):
     def flip_bit(start, end):
         offset = random.randrange(start, end)
         bit = random.randrange(0, 8)
-        print >>out, "[%d..%d):  %d.b%d" % (start, end, offset, bit)
+        print("[%d..%d):  %d.b%d" % (start, end, offset, bit), file=out)
         f = open(fn, "rb+")
         f.seek(offset)
         d = f.read(1)
@@ -1000,7 +1002,8 @@ def trial(config):
     twisted_trial.run()
 
 
-def fixOptionsClass( (subcmd, shortcut, OptionsClass, desc) ):
+def fixOptionsClass(xxx_todo_changeme ):
+    (subcmd, shortcut, OptionsClass, desc) = xxx_todo_changeme
     class FixedOptionsClass(OptionsClass):
         def getSynopsis(self):
             t = OptionsClass.getSynopsis(self)
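The xxx_todo_changeme parameter above is the placeholder name 2to3 emits when it removes tuple parameter unpacking, which Python 3 dropped (PEP 3113); the straw-man commit keeps the generated name as-is. A hand-edited port would presumably pick a meaningful name instead, along these lines (illustrative sketch; "entry" and the sample tuple are assumptions, not taken from the commit):

    from __future__ import print_function

    # PEP 3113: unpack inside the body instead of in the signature.
    def fix_options_entry(entry):
        (subcmd, shortcut, options_class, desc) = entry
        return (subcmd, shortcut, options_class, desc)

    print(fix_options_entry(("dump-share", None, object, "describe a share")))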
@@ -1035,7 +1038,7 @@ subcommand.
         return t
 
     def opt_help(self):
-        print str(self)
+        print(str(self))
         sys.exit(0)
 
 def flogtool(config):
src/allmydata/scripts/keygen.py
index 0982b0d80b858d458a18df9e0328295be75fb305..616588465d46781e1ddc3189acb5da94f1442ba2 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, sys
 from allmydata.scripts.common import BasedirOptions
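Every converted module gains this future import as its first statement. On Python 2, a __future__ import must precede all other code in the module (only the docstring, comments, and blank lines may come before it), otherwise the compiler raises a SyntaxError. Sketch:

    """A module docstring may legally precede __future__ imports."""
    from __future__ import print_function  # must come before any other statement

    import sys

    print("print is a function on Python 2 as well now", file=sys.stderr)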
@@ -36,9 +37,9 @@ def create_key_generator(config, out=sys.stdout, err=sys.stderr):
 
     if os.path.exists(basedir):
         if listdir_unicode(basedir):
-            print >>err, "The base directory %s is not empty." % quote_output(basedir)
-            print >>err, "To avoid clobbering anything, I am going to quit now."
-            print >>err, "Please use a different directory, or empty this one."
+            print("The base directory %s is not empty." % quote_output(basedir), file=err)
+            print("To avoid clobbering anything, I am going to quit now.", file=err)
+            print("Please use a different directory, or empty this one.", file=err)
             return -1
         # we're willing to use an empty directory
     else:
src/allmydata/scripts/runner.py
index 085967079d7842102a2ca3151ab22af3455549a9..b013a3e8e1163ab21f6443a4f7dd72e87b5a52b6 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, sys
 from cStringIO import StringIO
@@ -58,12 +59,12 @@ class Options(usage.Options):
 
     def opt_version(self):
         import allmydata
-        print >>self.stdout, allmydata.get_package_versions_string(debug=True)
+        print(allmydata.get_package_versions_string(debug=True), file=self.stdout)
         self.no_command_needed = True
 
     def opt_version_and_path(self):
         import allmydata
-        print >>self.stdout, allmydata.get_package_versions_string(show_paths=True, debug=True)
+        print(allmydata.get_package_versions_string(show_paths=True, debug=True), file=self.stdout)
         self.no_command_needed = True
 
     def getSynopsis(self):
@@ -105,18 +106,18 @@ def runner(argv,
 
     try:
         config.parseOptions(argv)
-    except usage.error, e:
+    except usage.error as e:
         if not run_by_human:
             raise
         c = config
         while hasattr(c, 'subOptions'):
             c = c.subOptions
-        print >>stdout, str(c)
+        print(str(c), file=stdout)
         try:
             msg = e.args[0].decode(get_io_encoding())
         except Exception:
             msg = repr(e)
-        print >>stdout, "%s:  %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False))
+        print("%s:  %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False)), file=stdout)
         return 1
 
     command = config.subCommand
src/allmydata/scripts/slow_operation.py
index 3da511f9ae5e01ff38a88293dd24db2a66c17282..82e3b03e7b27249a86b1a4dc5da10b10c6884961 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, time
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
@@ -21,7 +22,7 @@ class SlowOperationRunner:
         where = options.where
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
@@ -33,7 +34,7 @@ class SlowOperationRunner:
         url = self.make_url(url, ophandle)
         resp = do_http("POST", url)
         if resp.status not in (200, 302):
-            print >>stderr, format_http_error("ERROR", resp)
+            print(format_http_error("ERROR", resp), file=stderr)
             return 1
         # now we poll for results. We nominally poll at t=1, 5, 10, 30, 60,
         # 90, k*120 seconds, but if the poll takes non-zero time, that will
@@ -66,7 +67,7 @@ class SlowOperationRunner:
         stderr = self.options.stderr
         resp = do_http("GET", url)
         if resp.status != 200:
-            print >>stderr, format_http_error("ERROR", resp)
+            print(format_http_error("ERROR", resp), file=stderr)
             return True
         jdata = resp.read()
         data = simplejson.loads(jdata)
@@ -74,9 +75,9 @@ class SlowOperationRunner:
             return False
         if self.options.get("raw"):
             if is_printable_ascii(jdata):
-                print >>stdout, jdata
+                print(jdata, file=stdout)
             else:
-                print >>stderr, "The JSON response contained unprintable characters:\n%s" % quote_output(jdata)
+                print("The JSON response contained unprintable characters:\n%s" % quote_output(jdata), file=stderr)
             return True
         self.write_results(data)
         return True
src/allmydata/scripts/startstop_node.py
index 9ecbf06924f739b7a8491face29352dd874a1491..f82bc567fb43ae709ca89475abe425de5ac84e20 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, sys, signal, time
 from allmydata.scripts.common import BasedirOptions
@@ -40,16 +41,16 @@ class RunOptions(BasedirOptions):
 
 def start(opts, out=sys.stdout, err=sys.stderr):
     basedir = opts['basedir']
-    print >>out, "STARTING", quote_output(basedir)
+    print("STARTING", quote_output(basedir), file=out)
     if not os.path.isdir(basedir):
-        print >>err, "%s does not look like a directory at all" % quote_output(basedir)
+        print("%s does not look like a directory at all" % quote_output(basedir), file=err)
         return 1
     for fn in listdir_unicode(basedir):
         if fn.endswith(u".tac"):
             tac = str(fn)
             break
     else:
-        print >>err, "%s does not look like a node directory (no .tac file)" % quote_output(basedir)
+        print("%s does not look like a node directory (no .tac file)" % quote_output(basedir), file=err)
         return 1
     if "client" in tac:
         nodetype = "client"
@@ -77,10 +78,10 @@ def start(opts, out=sys.stdout, err=sys.stderr):
 
 def stop(config, out=sys.stdout, err=sys.stderr):
     basedir = config['basedir']
-    print >>out, "STOPPING", quote_output(basedir)
+    print("STOPPING", quote_output(basedir), file=out)
     pidfile = os.path.join(basedir, "twistd.pid")
     if not os.path.exists(pidfile):
-        print >>err, "%s does not look like a running node directory (no twistd.pid)" % quote_output(basedir)
+        print("%s does not look like a running node directory (no twistd.pid)" % quote_output(basedir), file=err)
         # we define rc=2 to mean "nothing is running, but it wasn't me who
         # stopped it"
         return 2
@@ -92,9 +93,9 @@ def stop(config, out=sys.stdout, err=sys.stderr):
     # the user but keep waiting until they give up.
     try:
         os.kill(pid, signal.SIGKILL)
-    except OSError, oserr:
+    except OSError as oserr:
         if oserr.errno == 3:
-            print oserr.strerror
+            print(oserr.strerror)
             # the process didn't exist, so wipe the pid file
             os.remove(pidfile)
             return 2
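Aside on the hunk above: the literal 3 tested against oserr.errno is ESRCH ("no such process"). The commit leaves the magic number in place; the errno module spells it portably, as in this hedged sketch (not what the commit does):

    import errno, os, signal

    def kill_if_running(pid):
        try:
            os.kill(pid, signal.SIGKILL)
        except OSError as oserr:
            if oserr.errno == errno.ESRCH:  # no such process
                return False
            raise
        return True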
@@ -113,20 +114,20 @@ def stop(config, out=sys.stdout, err=sys.stderr):
         try:
             os.kill(pid, 0)
         except OSError:
-            print >>out, "process %d is dead" % pid
+            print("process %d is dead" % pid, file=out)
             return
         wait -= 1
         if wait < 0:
             if first_time:
-                print >>err, ("It looks like pid %d is still running "
+                print(("It looks like pid %d is still running "
                               "after %d seconds" % (pid,
-                                                    (time.time() - start)))
-                print >>err, "I will keep watching it until you interrupt me."
+                                                    (time.time() - start))), file=err)
+                print("I will keep watching it until you interrupt me.", file=err)
                 wait = 10
                 first_time = False
             else:
-                print >>err, "pid %d still running after %d seconds" % \
-                      (pid, (time.time() - start))
+                print("pid %d still running after %d seconds" % \
+                      (pid, (time.time() - start)), file=err)
                 wait = 10
         time.sleep(1)
     # we define rc=1 to mean "I think something is still running, sorry"
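The wait loop above leans on a standard POSIX idiom: os.kill(pid, 0) delivers no signal, but raises OSError once the target process no longer exists. Extracted as a stand-alone probe (illustrative sketch, not code from the commit):

    import errno, os

    def is_alive(pid):
        # "Signal 0" probe: nothing is delivered. A dead pid raises
        # OSError(ESRCH); EPERM means alive but owned by another user.
        try:
            os.kill(pid, 0)
        except OSError as e:
            return e.errno == errno.EPERM
        return True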
@@ -135,10 +136,10 @@ def stop(config, out=sys.stdout, err=sys.stderr):
 def restart(config, stdout, stderr):
     rc = stop(config, stdout, stderr)
     if rc == 2:
-        print >>stderr, "ignoring couldn't-stop"
+        print("ignoring couldn't-stop", file=stderr)
         rc = 0
     if rc:
-        print >>stderr, "not restarting"
+        print("not restarting", file=stderr)
         return rc
     return start(config, stdout, stderr)
 
@@ -151,19 +152,19 @@ def run(config, stdout, stderr):
     precondition(isinstance(basedir, unicode), basedir)
 
     if not os.path.isdir(basedir):
-        print >>stderr, "%s does not look like a directory at all" % quote_output(basedir)
+        print("%s does not look like a directory at all" % quote_output(basedir), file=stderr)
         return 1
     for fn in listdir_unicode(basedir):
         if fn.endswith(u".tac"):
             tac = str(fn)
             break
     else:
-        print >>stderr, "%s does not look like a node directory (no .tac file)" % quote_output(basedir)
+        print("%s does not look like a node directory (no .tac file)" % quote_output(basedir), file=stderr)
         return 1
     if "client" not in tac:
-        print >>stderr, ("%s looks like it contains a non-client node (%s).\n"
+        print(("%s looks like it contains a non-client node (%s).\n"
                          "Use 'tahoe start' instead of 'tahoe run'."
-                         % (quote_output(basedir), tac))
+                         % (quote_output(basedir), tac)), file=stderr)
         return 1
 
     os.chdir(basedir)
src/allmydata/scripts/stats_gatherer.py
index 7762b2c50236a4e535bfea09fca89218568b2845..79fd307ab0c8f13577db90956e3a374430a0af6d 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, sys
 from allmydata.scripts.common import BasedirOptions
@@ -32,9 +33,9 @@ def create_stats_gatherer(config, out=sys.stdout, err=sys.stderr):
 
     if os.path.exists(basedir):
         if listdir_unicode(basedir):
-            print >>err, "The base directory %s is not empty." % quote_output(basedir)
-            print >>err, "To avoid clobbering anything, I am going to quit now."
-            print >>err, "Please use a different directory, or empty this one."
+            print("The base directory %s is not empty." % quote_output(basedir), file=err)
+            print("To avoid clobbering anything, I am going to quit now.", file=err)
+            print("Please use a different directory, or empty this one.", file=err)
             return -1
         # we're willing to use an empty directory
     else:
src/allmydata/scripts/tahoe_add_alias.py
index f3ed15c4ad49ba617a2f0876032ab6af1241ce58..2e2d0af0195226a53cb0eed8cbec11a024e8811b 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os.path
 import codecs
@@ -34,22 +35,22 @@ def add_alias(options):
     stderr = options.stderr
     if u":" in alias:
         # a single trailing colon will already have been stripped if present
-        print >>stderr, "Alias names cannot contain colons."
+        print("Alias names cannot contain colons.", file=stderr)
         return 1
     if u" " in alias:
-        print >>stderr, "Alias names cannot contain spaces."
+        print("Alias names cannot contain spaces.", file=stderr)
         return 1
 
     old_aliases = get_aliases(nodedir)
     if alias in old_aliases:
-        print >>stderr, "Alias %s already exists!" % quote_output(alias)
+        print("Alias %s already exists!" % quote_output(alias), file=stderr)
         return 1
     aliasfile = os.path.join(nodedir, "private", "aliases")
     cap = uri.from_string_dirnode(cap).to_string()
 
     add_line_to_aliasfile(aliasfile, alias, cap)
 
-    print >>stdout, "Alias %s added" % quote_output(alias)
+    print("Alias %s added" % quote_output(alias), file=stdout)
     return 0
 
 def create_alias(options):
@@ -60,15 +61,15 @@ def create_alias(options):
     stderr = options.stderr
     if u":" in alias:
         # a single trailing colon will already have been stripped if present
-        print >>stderr, "Alias names cannot contain colons."
+        print("Alias names cannot contain colons.", file=stderr)
         return 1
     if u" " in alias:
-        print >>stderr, "Alias names cannot contain spaces."
+        print("Alias names cannot contain spaces.", file=stderr)
         return 1
 
     old_aliases = get_aliases(nodedir)
     if alias in old_aliases:
-        print >>stderr, "Alias %s already exists!" % quote_output(alias)
+        print("Alias %s already exists!" % quote_output(alias), file=stderr)
         return 1
 
     aliasfile = os.path.join(nodedir, "private", "aliases")
@@ -87,7 +88,7 @@ def create_alias(options):
 
     add_line_to_aliasfile(aliasfile, alias, new_uri)
 
-    print >>stdout, "Alias %s created" % (quote_output(alias),)
+    print("Alias %s created" % (quote_output(alias),), file=stdout)
     return 0
 
 def list_aliases(options):
@@ -101,12 +102,12 @@ def list_aliases(options):
     rc = 0
     for name in alias_names:
         try:
-            print >>stdout, fmt % (unicode_to_output(name), unicode_to_output(aliases[name].decode('utf-8')))
+            print(fmt % (unicode_to_output(name), unicode_to_output(aliases[name].decode('utf-8'))), file=stdout)
         except (UnicodeEncodeError, UnicodeDecodeError):
-            print >>stderr, fmt % (quote_output(name), quote_output(aliases[name]))
+            print(fmt % (quote_output(name), quote_output(aliases[name])), file=stderr)
             rc = 1
 
     if rc == 1:
-        print >>stderr, "\nThis listing included aliases or caps that could not be converted to the terminal" \
-                        "\noutput encoding. These are shown using backslash escapes and in quotes."
+        print("\nThis listing included aliases or caps that could not be converted to the terminal" \
+                        "\noutput encoding. These are shown using backslash escapes and in quotes.", file=stderr)
     return rc
src/allmydata/scripts/tahoe_backup.py
index ef3da34b4fb543dbf9d7626506b931284d8770a4..4d3b929a1ab3a946d1d1273ad9290d5e5e41b6ef 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os.path
 import time
@@ -89,12 +90,12 @@ class BackerUpper:
         bdbfile = abspath_expanduser_unicode(bdbfile)
         self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
         if not self.backupdb:
-            print >>stderr, "ERROR: Unable to load backup db."
+            print("ERROR: Unable to load backup db.", file=stderr)
             return 1
 
         try:
             rootcap, path = get_alias(options.aliases, options.to_dir, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
@@ -111,7 +112,7 @@ class BackerUpper:
         if resp.status == 404:
             resp = do_http("POST", archives_url + "?t=mkdir")
             if resp.status != 200:
-                print >>stderr, format_http_error("Unable to create target directory", resp)
+                print(format_http_error("Unable to create target directory", resp), file=stderr)
                 return 1
 
         # second step: process the tree
@@ -127,7 +128,7 @@ class BackerUpper:
         elapsed_time = str(end_timestamp - start_timestamp).split('.')[0]
 
         if self.verbosity >= 1:
-            print >>stdout, (" %d files uploaded (%d reused), "
+            print((" %d files uploaded (%d reused), "
                              "%d files skipped, "
                              "%d directories created (%d reused), "
                              "%d directories skipped"
@@ -136,12 +137,12 @@ class BackerUpper:
                                 self.files_skipped,
                                 self.directories_created,
                                 self.directories_reused,
-                                self.directories_skipped))
+                                self.directories_skipped)), file=stdout)
             if self.verbosity >= 2:
-                print >>stdout, (" %d files checked, %d directories checked"
+                print((" %d files checked, %d directories checked"
                                  % (self.files_checked,
-                                    self.directories_checked))
-            print >>stdout, " backup done, elapsed time: %s" % elapsed_time
+                                    self.directories_checked)), file=stdout)
+            print(" backup done, elapsed time: %s" % elapsed_time, file=stdout)
 
         # The command exits with code 2 if files or directories were skipped
         if self.files_skipped or self.directories_skipped:
@@ -153,11 +154,11 @@ class BackerUpper:
     def verboseprint(self, msg):
         precondition(isinstance(msg, str), msg)
         if self.verbosity >= 2:
-            print >>self.options.stdout, msg
+            print(msg, file=self.options.stdout)
 
     def warn(self, msg):
         precondition(isinstance(msg, str), msg)
-        print >>self.options.stderr, msg
+        print(msg, file=self.options.stderr)
 
     def process(self, localpath):
         precondition(isinstance(localpath, unicode), localpath)
src/allmydata/scripts/tahoe_check.py
index 4ac669215b24e1b58824a9c1874b3d5c6c93ef0e..47528485c0a599f51b516716a5011cc9cc377526 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import urllib
 import simplejson
@@ -23,7 +24,7 @@ def check_location(options, where):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     if path == '/':
@@ -42,7 +43,7 @@ def check_location(options, where):
 
     resp = do_http("POST", url)
     if resp.status != 200:
-        print >>stderr, format_http_error("ERROR", resp)
+        print(format_http_error("ERROR", resp), file=stderr)
         return 1
     jdata = resp.read()
     if options.get("raw"):
@@ -129,12 +130,12 @@ class DeepCheckOutput(LineOnlyReceiver):
 
     def lineReceived(self, line):
         if self.in_error:
-            print >>self.stderr, quote_output(line, quotemarks=False)
+            print(quote_output(line, quotemarks=False), file=self.stderr)
             return
         if line.startswith("ERROR:"):
             self.in_error = True
             self.streamer.rc = 1
-            print >>self.stderr, quote_output(line, quotemarks=False)
+            print(quote_output(line, quotemarks=False), file=self.stderr)
             return
 
         d = simplejson.loads(line)
@@ -144,7 +145,7 @@ class DeepCheckOutput(LineOnlyReceiver):
         self.num_objects += 1
         # non-verbose means print a progress marker every 100 files
         if self.num_objects % 100 == 0:
-            print >>stdout, "%d objects checked.." % self.num_objects
+            print("%d objects checked.." % self.num_objects, file=stdout)
         cr = d["check-results"]
         if cr["results"]["healthy"]:
             self.files_healthy += 1
@@ -158,19 +159,19 @@ class DeepCheckOutput(LineOnlyReceiver):
 
             # LIT files and directories do not have a "summary" field.
             summary = cr.get("summary", "Healthy (LIT)")
-            print >>stdout, "%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False))
+            print("%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False)), file=stdout)
 
         # always print out corrupt shares
         for shareloc in cr["results"].get("list-corrupt-shares", []):
             (serverid, storage_index, sharenum) = shareloc
-            print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum)
+            print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum), file=stdout)
 
     def done(self):
         if self.in_error:
             return
         stdout = self.stdout
-        print >>stdout, "done: %d objects checked, %d healthy, %d unhealthy" \
-              % (self.num_objects, self.files_healthy, self.files_unhealthy)
+        print("done: %d objects checked, %d healthy, %d unhealthy" \
+              % (self.num_objects, self.files_healthy, self.files_unhealthy), file=stdout)
 
 class DeepCheckAndRepairOutput(LineOnlyReceiver):
     delimiter = "\n"
@@ -192,12 +193,12 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
 
     def lineReceived(self, line):
         if self.in_error:
-            print >>self.stderr, quote_output(line, quotemarks=False)
+            print(quote_output(line, quotemarks=False), file=self.stderr)
             return
         if line.startswith("ERROR:"):
             self.in_error = True
             self.streamer.rc = 1
-            print >>self.stderr, quote_output(line, quotemarks=False)
+            print(quote_output(line, quotemarks=False), file=self.stderr)
             return
 
         d = simplejson.loads(line)
@@ -207,7 +208,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
         self.num_objects += 1
         # non-verbose means print a progress marker every 100 files
         if self.num_objects % 100 == 0:
-            print >>stdout, "%d objects checked.." % self.num_objects
+            print("%d objects checked.." % self.num_objects, file=stdout)
         crr = d["check-and-repair-results"]
         if d["storage-index"]:
             if crr["pre-repair-results"]["results"]["healthy"]:
@@ -239,36 +240,36 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
                 summary = "healthy"
             else:
                 summary = "not healthy"
-            print >>stdout, "%s: %s" % (quote_path(path), summary)
+            print("%s: %s" % (quote_path(path), summary), file=stdout)
 
         # always print out corrupt shares
         prr = crr.get("pre-repair-results", {})
         for shareloc in prr.get("results", {}).get("list-corrupt-shares", []):
             (serverid, storage_index, sharenum) = shareloc
-            print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum)
+            print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum), file=stdout)
 
         # always print out repairs
         if crr["repair-attempted"]:
             if crr["repair-successful"]:
-                print >>stdout, " repair successful"
+                print(" repair successful", file=stdout)
             else:
-                print >>stdout, " repair failed"
+                print(" repair failed", file=stdout)
 
     def done(self):
         if self.in_error:
             return
         stdout = self.stdout
-        print >>stdout, "done: %d objects checked" % self.num_objects
-        print >>stdout, " pre-repair: %d healthy, %d unhealthy" \
+        print("done: %d objects checked" % self.num_objects, file=stdout)
+        print(" pre-repair: %d healthy, %d unhealthy" \
               % (self.pre_repair_files_healthy,
-                 self.pre_repair_files_unhealthy)
-        print >>stdout, " %d repairs attempted, %d successful, %d failed" \
+                 self.pre_repair_files_unhealthy), file=stdout)
+        print(" %d repairs attempted, %d successful, %d failed" \
               % (self.repairs_attempted,
                  self.repairs_successful,
-                 (self.repairs_attempted - self.repairs_successful))
-        print >>stdout, " post-repair: %d healthy, %d unhealthy" \
+                 (self.repairs_attempted - self.repairs_successful)), file=stdout)
+        print(" post-repair: %d healthy, %d unhealthy" \
               % (self.post_repair_files_healthy,
-                 self.post_repair_files_unhealthy)
+                 self.post_repair_files_unhealthy), file=stdout)
 
 class DeepCheckStreamer(LineOnlyReceiver):
 
@@ -284,7 +285,7 @@ class DeepCheckStreamer(LineOnlyReceiver):
 
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
@@ -305,7 +306,7 @@ class DeepCheckStreamer(LineOnlyReceiver):
             url += "&add-lease=true"
         resp = do_http("POST", url)
         if resp.status not in (200, 302):
-            print >>stderr, format_http_error("ERROR", resp)
+            print(format_http_error("ERROR", resp), file=stderr)
             return 1
 
         # use Twisted to split this into lines
src/allmydata/scripts/tahoe_cp.py
index 1ad460d897403bec53ebc147dc1ecc8c95336b05..67b0d33c84ec530674322fb63498190774dbe37b 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os.path
 import urllib
@@ -12,6 +13,7 @@ from allmydata.util import fileutil
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, to_str
 from allmydata.util.assertutil import precondition
+import six
 
 
 class MissingSourceError(TahoeError):
@@ -221,7 +223,7 @@ class TahoeDirectorySource:
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in six.iteritems(d["children"])] )
         self.children = None
 
     def init_from_parsed(self, parsed):
@@ -231,7 +233,7 @@ class TahoeDirectorySource:
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in six.iteritems(d["children"])] )
         self.children = None
 
     def populate(self, recurse):
@@ -301,7 +303,7 @@ class TahoeDirectoryTarget:
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in six.iteritems(d["children"])] )
         self.children = None
 
     def init_from_grid(self, writecap, readcap):
@@ -318,7 +320,7 @@ class TahoeDirectoryTarget:
         self.mutable = d.get("mutable", False) # older nodes don't provide it
         self.children_d = dict( [(unicode(name),value)
                                  for (name,value)
-                                 in d["children"].iteritems()] )
+                                 in six.iteritems(d["children"])] )
         self.children = None
 
     def just_created(self, writecap):
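These hunks replace dict.iteritems(), which Python 3 removed, with six.iteritems(d); six dispatches to iteritems() on Python 2 and items() on Python 3. The unicode() builtin on the same lines is still Python-2-only; six.text_type would be the matching portable spelling. A small sketch (the sample dict is invented for illustration):

    import six

    children = {u"name.txt": ("filenode", {}), u"subdir": ("dirnode", {})}
    children_d = dict([(six.text_type(name), value)
                       for (name, value) in six.iteritems(children)])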
@@ -443,17 +445,17 @@ class Copier:
         self.stderr = options.stderr
         if verbosity >= 2 and not self.progressfunc:
             def progress(message):
-                print >>self.stderr, message
+                print(message, file=self.stderr)
             self.progressfunc = progress
         self.caps_only = options["caps-only"]
         self.cache = {}
         try:
             status = self.try_copy()
             return status
-        except TahoeError, te:
+        except TahoeError as te:
             if verbosity >= 2:
                 Failure().printTraceback(self.stderr)
-                print >>self.stderr
+                print(file=self.stderr)
             te.display(self.stderr)
             return 1
 
@@ -515,7 +517,7 @@ class Copier:
         return 1
 
     def to_stderr(self, text):
-        print >>self.stderr, text
+        print(text, file=self.stderr)
 
     def get_target_info(self, destination_spec):
         rootcap, path = get_alias(self.aliases, destination_spec, None)
@@ -605,7 +607,7 @@ class Copier:
 
     def dump_graph(self, s, indent=" "):
         for name, child in s.children.items():
-            print "%s%s: %r" % (indent, quote_output(name), child)
+            print("%s%s: %r" % (indent, quote_output(name), child))
             if isinstance(child, (LocalDirectorySource, TahoeDirectorySource)):
                 self.dump_graph(child, indent+"  ")
 
@@ -717,7 +719,7 @@ class Copier:
 
     def announce_success(self, msg):
         if self.verbosity >= 1:
-            print >>self.stdout, "Success: %s" % msg
+            print("Success: %s" % msg, file=self.stdout)
         return 0
 
     def copy_file(self, source, target):
src/allmydata/scripts/tahoe_get.py
index 280d8c052abdd697d06e131c57551e79bbfe9dd6..860eed2f6e4c6303b78cd82189ca6cc386672ecd 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, urllib
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
@@ -16,7 +17,7 @@ def get(options):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -38,7 +39,7 @@ def get(options):
             outf.close()
         rc = 0
     else:
-        print >>stderr, format_http_error("Error during GET", resp)
+        print(format_http_error("Error during GET", resp), file=stderr)
         rc = 1
 
     return rc
src/allmydata/scripts/tahoe_ls.py
index 78eea1f26ab1749bcc0147a46d6faad69c1f4a37..7b93efc2ea03acd3b2b2a6e7eedbfaf8f42d80d0 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import urllib, time
 import simplejson
@@ -19,7 +20,7 @@ def list(options):
         where = where[:-1]
     try:
         rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -30,10 +31,10 @@ def list(options):
     url += "?t=json"
     resp = do_http("GET", url)
     if resp.status == 404:
-        print >>stderr, "No such file or directory"
+        print("No such file or directory", file=stderr)
         return 2
     if resp.status != 200:
-        print >>stderr, format_http_error("Error during GET", resp)
+        print(format_http_error("Error during GET", resp), file=stderr)
         if resp.status == 0:
             return 3
         else:
@@ -44,19 +45,19 @@ def list(options):
     if options['json']:
         # The webapi server should always output printable ASCII.
         if is_printable_ascii(data):
-            print >>stdout, data
+            print(data, file=stdout)
             return 0
         else:
-            print >>stderr, "The JSON response contained unprintable characters:"
-            print >>stderr, quote_output(data, quotemarks=False)
+            print("The JSON response contained unprintable characters:", file=stderr)
+            print(quote_output(data, quotemarks=False), file=stderr)
             return 1
 
     try:
         parsed = simplejson.loads(data)
-    except Exception, e:
-        print >>stderr, "error: %s" % quote_output(e.args[0], quotemarks=False)
-        print >>stderr, "Could not parse JSON response:"
-        print >>stderr, quote_output(data, quotemarks=False)
+    except Exception as e:
+        print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
+        print("Could not parse JSON response:", file=stderr)
+        print(quote_output(data, quotemarks=False), file=stderr)
         return 1
 
     nodetype, d = parsed
@@ -180,16 +181,16 @@ def list(options):
     rc = 0
     for (encoding_error, row) in rows:
         if encoding_error:
-            print >>stderr, (fmt % tuple(row)).rstrip()
+            print((fmt % tuple(row)).rstrip(), file=stderr)
             rc = 1
         else:
-            print >>stdout, (fmt % tuple(row)).rstrip()
+            print((fmt % tuple(row)).rstrip(), file=stdout)
 
     if rc == 1:
-        print >>stderr, "\nThis listing included files whose names could not be converted to the terminal" \
-                        "\noutput encoding. Their names are shown using backslash escapes and in quotes."
+        print("\nThis listing included files whose names could not be converted to the terminal" \
+                        "\noutput encoding. Their names are shown using backslash escapes and in quotes.", file=stderr)
     if has_unknowns:
-        print >>stderr, "\nThis listing included unknown objects. Using a webapi server that supports" \
-                        "\na later version of Tahoe may help."
+        print("\nThis listing included unknown objects. Using a webapi server that supports" \
+                        "\na later version of Tahoe may help.", file=stderr)
 
     return rc
src/allmydata/scripts/tahoe_manifest.py
index 0b9c64f6b5849b339736e13e1e2106a7a3482645..173c53824526ed7ef3a175a4c3178074e6f81bbf 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import urllib, simplejson
 from twisted.protocols.basic import LineOnlyReceiver
@@ -29,7 +30,7 @@ class ManifestStreamer(LineOnlyReceiver):
         where = options.where
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
@@ -41,7 +42,7 @@ class ManifestStreamer(LineOnlyReceiver):
         url += "?t=stream-manifest"
         resp = do_http("POST", url)
         if resp.status not in (200, 302):
-            print >>stderr, format_http_error("ERROR", resp)
+            print(format_http_error("ERROR", resp), file=stderr)
             return 1
         #print "RESP", dir(resp)
         # use Twisted to split this into lines
@@ -60,35 +61,35 @@ class ManifestStreamer(LineOnlyReceiver):
         stdout = self.options.stdout
         stderr = self.options.stderr
         if self.in_error:
-            print >>stderr, quote_output(line, quotemarks=False)
+            print(quote_output(line, quotemarks=False), file=stderr)
             return
         if line.startswith("ERROR:"):
             self.in_error = True
             self.rc = 1
-            print >>stderr, quote_output(line, quotemarks=False)
+            print(quote_output(line, quotemarks=False), file=stderr)
             return
 
         try:
             d = simplejson.loads(line.decode('utf-8'))
-        except Exception, e:
-            print >>stderr, "ERROR could not decode/parse %s\nERROR  %r" % (quote_output(line), e)
+        except Exception as e:
+            print("ERROR could not decode/parse %s\nERROR  %r" % (quote_output(line), e), file=stderr)
         else:
             if d["type"] in ("file", "directory"):
                 if self.options["storage-index"]:
                     si = d.get("storage-index", None)
                     if si:
-                        print >>stdout, quote_output(si, quotemarks=False)
+                        print(quote_output(si, quotemarks=False), file=stdout)
                 elif self.options["verify-cap"]:
                     vc = d.get("verifycap", None)
                     if vc:
-                        print >>stdout, quote_output(vc, quotemarks=False)
+                        print(quote_output(vc, quotemarks=False), file=stdout)
                 elif self.options["repair-cap"]:
                     vc = d.get("repaircap", None)
                     if vc:
-                        print >>stdout, quote_output(vc, quotemarks=False)
+                        print(quote_output(vc, quotemarks=False), file=stdout)
                 else:
-                    print >>stdout, "%s %s" % (quote_output(d["cap"], quotemarks=False),
-                                               quote_path(d["path"], quotemarks=False))
+                    print("%s %s" % (quote_output(d["cap"], quotemarks=False),
+                                               quote_path(d["path"], quotemarks=False)), file=stdout)
 
 def manifest(options):
     return ManifestStreamer().run(options)
@@ -113,18 +114,18 @@ class StatsGrabber(SlowOperationRunner):
                 "largest-immutable-file",
                 )
         width = max([len(k) for k in keys])
-        print >>stdout, "Counts and Total Sizes:"
+        print("Counts and Total Sizes:", file=stdout)
         for k in keys:
             fmt = "%" + str(width) + "s: %d"
             if k in data:
                 value = data[k]
                 if not k.startswith("count-") and value > 1000:
                     absize = abbreviate_space_both(value)
-                    print >>stdout, fmt % (k, data[k]), "  ", absize
+                    print(fmt % (k, data[k]), "  ", absize, file=stdout)
                 else:
-                    print >>stdout, fmt % (k, data[k])
+                    print(fmt % (k, data[k]), file=stdout)
         if data["size-files-histogram"]:
-            print >>stdout, "Size Histogram:"
+            print("Size Histogram:", file=stdout)
             prevmax = None
             maxlen = max([len(str(maxsize))
                           for (minsize, maxsize, count)
@@ -138,10 +139,10 @@ class StatsGrabber(SlowOperationRunner):
             linefmt = minfmt + "-" + maxfmt + " : " + countfmt + "    %s"
             for (minsize, maxsize, count) in data["size-files-histogram"]:
                 if prevmax is not None and minsize != prevmax+1:
-                    print >>stdout, " "*(maxlen-1) + "..."
+                    print(" "*(maxlen-1) + "...", file=stdout)
                 prevmax = maxsize
-                print >>stdout, linefmt % (minsize, maxsize, count,
-                                           abbreviate_space_both(maxsize))
+                print(linefmt % (minsize, maxsize, count,
+                                           abbreviate_space_both(maxsize)), file=stdout)
 
 def stats(options):
     return StatsGrabber().run(options)
src/allmydata/scripts/tahoe_mkdir.py
index 9820ada76fe92d17504192942ac17225ac898692..a76adc8fc205f660e8ff6eb93b9eb8e04087c043 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import urllib
 from allmydata.scripts.common_http import do_http, check_http_error
@@ -15,7 +16,7 @@ def mkdir(options):
     if where:
         try:
             rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
 
@@ -30,7 +31,7 @@ def mkdir(options):
             return rc
         new_uri = resp.read().strip()
         # emit its write-cap
-        print >>stdout, quote_output(new_uri, quotemarks=False)
+        print(quote_output(new_uri, quotemarks=False), file=stdout)
         return 0
 
     # create a new directory at the given location
@@ -45,5 +46,5 @@ def mkdir(options):
     resp = do_http("POST", url)
     check_http_error(resp, stderr)
     new_uri = resp.read().strip()
-    print >>stdout, quote_output(new_uri, quotemarks=False)
+    print(quote_output(new_uri, quotemarks=False), file=stdout)
     return 0
src/allmydata/scripts/tahoe_mv.py
index cd54f7a70978c9549ef03fb56fe50cb8520eb80a..b54aab9a001a8d8997dcb005d5b5477158c191db 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import re
 import urllib
@@ -21,7 +22,7 @@ def mv(options, mode="move"):
         nodeurl += "/"
     try:
         rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -30,7 +31,7 @@ def mv(options, mode="move"):
     # figure out the source cap
     resp = do_http("GET", from_url + "?t=json")
     if not re.search(r'^2\d\d$', str(resp.status)):
-        print >>stderr, format_http_error("Error", resp)
+        print(format_http_error("Error", resp), file=stderr)
         return 1
     data = resp.read()
     nodetype, attrs = simplejson.loads(data)
@@ -39,7 +40,7 @@ def mv(options, mode="move"):
     # now get the target
     try:
         rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -56,19 +57,19 @@ def mv(options, mode="move"):
     status = resp.status
     if not re.search(r'^2\d\d$', str(status)):
         if status == 409:
-            print >>stderr, "Error: You can't overwrite a directory with a file"
+            print("Error: You can't overwrite a directory with a file", file=stderr)
         else:
-            print >>stderr, format_http_error("Error", resp)
+            print(format_http_error("Error", resp), file=stderr)
             if mode == "move":
-                print >>stderr, "NOT removing the original"
+                print("NOT removing the original", file=stderr)
         return 1
 
     if mode == "move":
         # now remove the original
         resp = do_http("DELETE", from_url)
         if not re.search(r'^2\d\d$', str(resp.status)):
-            print >>stderr, format_http_error("Error deleting original after move", resp)
+            print(format_http_error("Error deleting original after move", resp), file=stderr)
             return 2
 
-    print >>stdout, "OK"
+    print("OK", file=stdout)
     return 0
src/allmydata/scripts/tahoe_put.py
index a85539efec87e0d96f4a839705a4261212848ac8..8dd2d0cfb85901716fee33e099047843187e36aa 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os
 from cStringIO import StringIO
@@ -49,13 +50,13 @@ def put(options):
         else:
             try:
                 rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
-            except UnknownAliasError, e:
+            except UnknownAliasError as e:
                 e.display(stderr)
                 return 1
             if path.startswith("/"):
                 suggestion = to_file.replace(u"/", u"", 1)
-                print >>stderr, "Error: The remote filename must not start with a slash"
-                print >>stderr, "Please try again, perhaps with %s" % quote_output(suggestion)
+                print("Error: The remote filename must not start with a slash", file=stderr)
+                print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
                 return 1
             url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
             if path:
@@ -78,16 +79,16 @@ def put(options):
         # do_http() can't use stdin directly: for one thing, we need a
         # Content-Length field. So we currently must copy it.
         if verbosity > 0:
-            print >>stderr, "waiting for file data on stdin.."
+            print("waiting for file data on stdin..", file=stderr)
         data = stdin.read()
         infileobj = StringIO(data)
 
     resp = do_http("PUT", url, infileobj)
 
     if resp.status in (200, 201,):
-        print >>stderr, format_http_success(resp)
-        print >>stdout, quote_output(resp.read(), quotemarks=False)
+        print(format_http_success(resp), file=stderr)
+        print(quote_output(resp.read(), quotemarks=False), file=stdout)
         return 0
 
-    print >>stderr, format_http_error("Error", resp)
+    print(format_http_error("Error", resp), file=stderr)
     return 1
src/allmydata/scripts/tahoe_unlink.py
index 979fcc4e2e180e9d0c3c75f024ad8b98a3fd6589..bc1d43c9e570782257bc0c6705c8dfd06fb279e1 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import urllib
 from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
@@ -18,12 +19,12 @@ def unlink(options, command="unlink"):
         nodeurl += "/"
     try:
         rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
-    except UnknownAliasError, e:
+    except UnknownAliasError as e:
         e.display(stderr)
         return 1
     if not path:
-        print >>stderr, """
-'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,)
+        print("""
+'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,), file=stderr)
         return 1
 
     url = nodeurl + "uri/%s" % urllib.quote(rootcap)
@@ -32,8 +33,8 @@ def unlink(options, command="unlink"):
     resp = do_http("DELETE", url)
 
     if resp.status in (200,):
-        print >>stdout, format_http_success(resp)
+        print(format_http_success(resp), file=stdout)
         return 0
 
-    print >>stderr, format_http_error("ERROR", resp)
+    print(format_http_error("ERROR", resp), file=stderr)
     return 1
src/allmydata/scripts/tahoe_webopen.py
index 03500dc76254fc091e6040437a9349eaf0dec121..a7b7ca7e12a263a154dcadfb20ca1f12fa7d4afe 100644 (file)
@@ -12,7 +12,7 @@ def webopen(options, opener=None):
     if where:
         try:
             rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
-        except UnknownAliasError, e:
+        except UnknownAliasError as e:
             e.display(stderr)
             return 1
         if path == '/':
src/allmydata/stats.py
index 7db323ba5ce68b2fe30101cb0dc47c1fc0188add..307f43f175209d3ed5b5e4318b6b2dbbb4c862a2 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os
 import pickle
@@ -199,7 +200,7 @@ class StatsGatherer(Referenceable, service.MultiService):
     def remote_provide(self, provider, nickname):
         tubid = self.get_tubid(provider)
         if tubid == '<unauth>':
-            print "WARNING: failed to get tubid for %s (%s)" % (provider, nickname)
+            print("WARNING: failed to get tubid for %s (%s)" % (provider, nickname))
             # don't add to clients to poll (polluting data) don't care about disconnect
             return
         self.clients[tubid] = provider
@@ -232,15 +233,15 @@ class StdOutStatsGatherer(StatsGatherer):
     def remote_provide(self, provider, nickname):
         tubid = self.get_tubid(provider)
         if self.verbose:
-            print 'connect "%s" [%s]' % (nickname, tubid)
+            print('connect "%s" [%s]' % (nickname, tubid))
             provider.notifyOnDisconnect(self.announce_lost_client, tubid)
         StatsGatherer.remote_provide(self, provider, nickname)
 
     def announce_lost_client(self, tubid):
-        print 'disconnect "%s" [%s]' % (self.nicknames[tubid], tubid)
+        print('disconnect "%s" [%s]' % (self.nicknames[tubid], tubid))
 
     def got_stats(self, stats, tubid, nickname):
-        print '"%s" [%s]:' % (nickname, tubid)
+        print('"%s" [%s]:' % (nickname, tubid))
         pprint.pprint(stats)
 
 class PickleStatsGatherer(StdOutStatsGatherer):
@@ -256,9 +257,9 @@ class PickleStatsGatherer(StdOutStatsGatherer):
             try:
                 self.gathered_stats = pickle.load(f)
             except Exception:
-                print ("Error while attempting to load pickle file %s.\n"
+                print(("Error while attempting to load pickle file %s.\n"
                        "You may need to restore this file from a backup, or delete it if no backup is available.\n" %
-                       quote_output(os.path.abspath(self.picklefile)))
+                       quote_output(os.path.abspath(self.picklefile))))
                 raise
             f.close()
         else:
src/allmydata/storage/server.py
index 9c0397c0bffeaba894e1891d3e1b41a9f67f5db0..3c52e393d91a925936990d5acb683163a61d784d 100644 (file)
@@ -9,6 +9,7 @@ from allmydata.util import fileutil, idlib, log, time_format
 import allmydata # for __full_version__
 
 from allmydata.storage.common import si_b2a, si_a2b, storage_index_to_dir
+import six
 _pyflakes_hush = [si_b2a, si_a2b, storage_index_to_dir] # re-exported
 from allmydata.storage.lease import LeaseInfo
 from allmydata.storage.mutable import MutableShareFile, EmptyShare, \
@@ -395,7 +396,7 @@ class StorageServer(service.MultiService, Referenceable):
         # since all shares get the same lease data, we just grab the leases
         # from the first share
         try:
-            shnum, filename = self._get_bucket_shares(storage_index).next()
+            shnum, filename = six.advance_iterator(self._get_bucket_shares(storage_index))
             sf = ShareFile(filename)
             return sf.get_leases()
         except StopIteration:
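Here the Python-2-only iterator method .next() (renamed __next__() in Python 3) is replaced by six.advance_iterator(), which is simply the next() builtin on Python 2.6+ and 3.x. An equivalent sketch (the generator and its yielded tuple are invented stand-ins, not real share paths):

    import six

    def bucket_shares():
        yield (0, "/path/to/share.0")

    shnum, filename = six.advance_iterator(bucket_shares())
    # identical effect on Python 2.6+: shnum, filename = next(bucket_shares())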
src/allmydata/test/bench_dirnode.py
index aecd06ce7b073780f7cd4530aca8e189a72c6433..443b9dd85045c5917f78f0b73d54486c21da83d8 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import hotshot.stats, os, random, sys
 
 from pyutil import benchutil, randutil # http://tahoe-lafs.org/trac/pyutil
@@ -89,7 +90,7 @@ class B(object):
         return self.random_fsnode(), random_metadata()
 
     def init_for_pack(self, N):
-        for i in xrange(len(self.children), N):
+        for i in range(len(self.children), N):
             name = random_unicode(random.randrange(0, 10))
             self.children.append( (name, self.random_child()) )
 
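xrange() does not exist on Python 3, so the hunk above falls back to plain range(), which builds a full list on Python 2; that is harmless for small counts like these. The lazy spelling that works on both versions would be six.moves.range, e.g.:

    from six.moves import range  # xrange on Python 2, the range type on Python 3

    for i in range(10):
        pass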
@@ -110,12 +111,12 @@ class B(object):
         for (initfunc, func) in [(self.init_for_unpack, self.unpack),
                                  (self.init_for_pack, self.pack),
                                  (self.init_for_unpack, self.unpack_and_repack)]:
-            print "benchmarking %s" % (func,)
+            print("benchmarking %s" % (func,))
             for N in 16, 512, 2048, 16384:
-                print "%5d" % N,
+                print("%5d" % N, end=' ')
                 benchutil.rep_bench(func, N, initfunc=initfunc, MAXREPS=20, UNITS_PER_SECOND=1000)
         benchutil.print_bench_footer(UNITS_PER_SECOND=1000)
-        print "(milliseconds)"
+        print("(milliseconds)")
 
     def prof_benchmarks(self):
         # This requires pyutil >= v1.3.34.
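The "%5d" hunk above shows the print-function replacement for Python 2's trailing-comma print, which suppressed the newline and emitted a soft space: end=' '. Sketch:

    from __future__ import print_function

    for N in (16, 512, 2048, 16384):
        print("%5d" % N, end=' ')  # stay on one line, as "print x," did
    print()                        # then finish the line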
@@ -128,7 +129,7 @@ class B(object):
 if __name__ == "__main__":
     if '--profile' in sys.argv:
         if os.path.exists(PROF_FILE_NAME):
-            print "WARNING: profiling results file '%s' already exists -- the profiling results from this run will be added into the profiling results stored in that file and then the sum of them will be printed out after this run." % (PROF_FILE_NAME,)
+            print("WARNING: profiling results file '%s' already exists -- the profiling results from this run will be added into the profiling results stored in that file and then the sum of them will be printed out after this run." % (PROF_FILE_NAME,))
         b = B()
         b.prof_benchmarks()
         b.print_stats()
src/allmydata/test/check_grid.py
index e0f069abf9057487e4aaaab21487b3f29a239829..cbcd4bc6f6652b50ae5838b28cc129b50d595525 100644 (file)
@@ -52,6 +52,7 @@ This script will also keep track of speeds and latencies and will write them
 in a machine-readable logfile.
 
 """
+from __future__ import print_function
 
 import time, subprocess, md5, os.path, random
 from twisted.python import usage
@@ -95,13 +96,13 @@ class GridTester:
         rc = p.returncode
         if expected_rc != None and rc != expected_rc:
             if stderr:
-                print "STDERR:"
-                print stderr
+                print("STDERR:")
+                print(stderr)
             raise CommandFailed("command '%s' failed: rc=%d" % (cmd, rc))
         return stdout, stderr
 
     def cli(self, cmd, *args, **kwargs):
-        print "tahoe", cmd, " ".join(args)
+        print("tahoe", cmd, " ".join(args))
         stdout, stderr = self.command(self.tahoe, cmd, "-d", self.nodedir,
                                       *args, **kwargs)
         if not kwargs.get("ignore_stderr", False) and stderr != "":
@@ -110,16 +111,16 @@ class GridTester:
         return stdout
 
     def stop_old_node(self):
-        print "tahoe stop", self.nodedir, "(force)"
+        print("tahoe stop", self.nodedir, "(force)")
         self.command(self.tahoe, "stop", self.nodedir, expected_rc=None)
 
     def start_node(self):
-        print "tahoe start", self.nodedir
+        print("tahoe start", self.nodedir)
         self.command(self.tahoe, "start", self.nodedir)
         time.sleep(5)
 
     def stop_node(self):
-        print "tahoe stop", self.nodedir
+        print("tahoe stop", self.nodedir)
         self.command(self.tahoe, "stop", self.nodedir)
 
     def read_and_check(self, f):
@@ -146,7 +147,7 @@ class GridTester:
     def listdir(self, dirname):
         out = self.cli("ls", "testgrid:"+dirname).strip().split("\n")
         files = [f.strip() for f in out]
-        print " ", files
+        print(" ", files)
         return files
 
     def do_test(self):
src/allmydata/test/check_load.py
index 197b36971df03ccdf822b992adf376af0e410d40..96c6fde87a886cc19f432a98e005a46783616865 100644 (file)
@@ -31,9 +31,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
 
 
 """
+from __future__ import print_function
 
 import os, sys, httplib, binascii
 import urllib, simplejson, random, time, urlparse
+import six
 
 if sys.argv[1] == "--stats":
     statsfiles = sys.argv[2:]
@@ -55,24 +57,24 @@ if sys.argv[1] == "--stats":
         if last_stats:
             delta = dict( [ (name,stats[name]-last_stats[name])
                             for name in stats ] )
-            print "THIS SAMPLE:"
+            print("THIS SAMPLE:")
             for name in sorted(delta.keys()):
                 avg = float(delta[name]) / float(DELAY)
-                print "%20s: %0.2f per second" % (name, avg)
+                print("%20s: %0.2f per second" % (name, avg))
             totals.append(delta)
             while len(totals) > MAXSAMPLES:
                 totals.pop(0)
 
             # now compute average
-            print
-            print "MOVING WINDOW AVERAGE:"
+            print()
+            print("MOVING WINDOW AVERAGE:")
             for name in sorted(delta.keys()):
                 avg = sum([ s[name] for s in totals]) / (DELAY*len(totals))
-                print "%20s %0.2f per second" % (name, avg)
+                print("%20s %0.2f per second" % (name, avg))
 
         last_stats = stats
-        print
-        print
+        print()
+        print()
         time.sleep(DELAY)
 
 stats_out = sys.argv[1]
@@ -106,8 +108,8 @@ def listdir(nodeurl, root, remote_pathname):
     try:
         parsed = simplejson.loads(data)
     except ValueError:
-        print "URL was", url
-        print "DATA was", data
+        print("URL was", url)
+        print("DATA was", data)
         raise
     nodetype, d = parsed
     assert nodetype == "dirnode"
@@ -115,7 +117,7 @@ def listdir(nodeurl, root, remote_pathname):
     directories_read += 1
     children = dict( [(unicode(name),value)
                       for (name,value)
-                      in d["children"].iteritems()] )
+                      in six.iteritems(d["children"])] )
     return children
 
 
@@ -237,11 +239,11 @@ while True:
         op = "read"
     else:
         op = "write"
-    print "OP:", op
+    print("OP:", op)
     server = random.choice(server_urls)
     if op == "read":
         pathname = choose_random_descendant(server, root)
-        print "  reading", pathname
+        print("  reading", pathname)
         read_and_discard(server, root, pathname)
         files_downloaded += 1
     elif op == "write":
@@ -252,9 +254,9 @@ while True:
             pathname = current_writedir + "/" + filename
         else:
             pathname = filename
-        print "  writing", pathname
+        print("  writing", pathname)
         size = choose_size()
-        print "   size", size
+        print("   size", size)
         generate_and_put(server, root, pathname, size)
         files_uploaded += 1
 
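
The iteritems hunks above swap d.iteritems() for six.iteritems(d), which picks the right spelling per interpreter. Roughly what the six helper does (a simplified sketch; the real implementation lives in the six library):

import sys

def iteritems(d):
    # Python 3 dropped dict.iteritems(); items() there is already a
    # lazy view, so iterate over it directly.
    if sys.version_info[0] >= 3:
        return iter(d.items())
    return d.iteritems()

children = dict((name, value) for name, value in iteritems({"a.txt": 1}))
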
index b9d79014e4a0316564454b65b7cdee5f18c3ee5d..8d32bc1b968c94e167c2b53cc066eb1877ef55b2 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os, shutil, sys, urllib, time, stat
 from cStringIO import StringIO
 from twisted.internet import defer, reactor, protocol, error
@@ -11,6 +12,7 @@ from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.encodingutil import get_filesystem_encoding
 from foolscap.api import Tub, fireEventually, flushEventualQueue
 from twisted.python import log
+import six
 
 class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter):
     full_speed_ahead = False
@@ -24,17 +26,17 @@ class StallableHTTPGetterDiscarder(tw_client.HTTPPageGetter):
             return
         if self._bytes_so_far > 1e6+100:
             if not self.stalled:
-                print "STALLING"
+                print("STALLING")
                 self.transport.pauseProducing()
                 self.stalled = reactor.callLater(10.0, self._resume_speed)
     def _resume_speed(self):
-        print "RESUME SPEED"
+        print("RESUME SPEED")
         self.stalled = None
         self.full_speed_ahead = True
         self.transport.resumeProducing()
     def handleResponseEnd(self):
         if self.stalled:
-            print "CANCEL"
+            print("CANCEL")
             self.stalled.cancel()
             self.stalled = None
         return tw_client.HTTPPageGetter.handleResponseEnd(self)
@@ -97,7 +99,7 @@ class SystemFramework(pollmixin.PollMixin):
         def _err(err):
             self.failed = err
             log.err(err)
-            print err
+            print(err)
         d.addErrback(_err)
         def _done(res):
             reactor.stop()
@@ -131,15 +133,15 @@ class SystemFramework(pollmixin.PollMixin):
         return d
 
     def record_initial_memusage(self):
-        print
-        print "Client started (no connections yet)"
+        print()
+        print("Client started (no connections yet)")
         d = self._print_usage()
         d.addCallback(self.stash_stats, "init")
         return d
 
     def wait_for_client_connected(self):
-        print
-        print "Client connecting to other nodes.."
+        print()
+        print("Client connecting to other nodes..")
         return self.control_rref.callRemote("wait_for_client_connections",
                                             self.numnodes+1)
 
@@ -339,7 +341,7 @@ this file are ignored.
         form.append('')
         form.append('UTF-8')
         form.append(sep)
-        for name, value in fields.iteritems():
+        for name, value in six.iteritems(fields):
             if isinstance(value, tuple):
                 filename, value = value
                 form.append('Content-Disposition: form-data; name="%s"; '
@@ -363,16 +365,16 @@ this file are ignored.
     def _print_usage(self, res=None):
         d = self.control_rref.callRemote("get_memory_usage")
         def _print(stats):
-            print "VmSize: %9d  VmPeak: %9d" % (stats["VmSize"],
-                                                stats["VmPeak"])
+            print("VmSize: %9d  VmPeak: %9d" % (stats["VmSize"],
+                                                stats["VmPeak"]))
             return stats
         d.addCallback(_print)
         return d
 
     def _do_upload(self, res, size, files, uris):
         name = '%d' % size
-        print
-        print "uploading %s" % name
+        print()
+        print("uploading %s" % name)
         if self.mode in ("upload", "upload-self"):
             files[name] = self.create_data(name, size)
             d = self.control_rref.callRemote("upload_from_file_to_uri",
@@ -409,7 +411,7 @@ this file are ignored.
             raise ValueError("unknown mode=%s" % self.mode)
         def _complete(uri):
             uris[name] = uri
-            print "uploaded %s" % name
+            print("uploaded %s" % name)
         d.addCallback(_complete)
         return d
 
@@ -417,7 +419,7 @@ this file are ignored.
         if self.mode not in ("download", "download-GET", "download-GET-slow"):
             return
         name = '%d' % size
-        print "downloading %s" % name
+        print("downloading %s" % name)
         uri = uris[name]
 
         if self.mode == "download":
@@ -431,7 +433,7 @@ this file are ignored.
             d = self.GET_discard(urllib.quote(url), stall=True)
 
         def _complete(res):
-            print "downloaded %s" % name
+            print("downloaded %s" % name)
             return res
         d.addCallback(_complete)
         return d
@@ -474,7 +476,7 @@ this file are ignored.
 
         #d.addCallback(self.stall)
         def _done(res):
-            print "FINISHING"
+            print("FINISHING")
         d.addCallback(_done)
         return d
 
@@ -487,9 +489,9 @@ this file are ignored.
 class ClientWatcher(protocol.ProcessProtocol):
     ended = False
     def outReceived(self, data):
-        print "OUT:", data
+        print("OUT:", data)
     def errReceived(self, data):
-        print "ERR:", data
+        print("ERR:", data)
     def processEnded(self, reason):
         self.ended = reason
         self.d.callback(None)
index c0b0ad80e19058b3ce6ce1e461a54d4fec44b32b..d63652b7411aa9dd89099c5bd1c43f447e87bfe0 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os, sys
 from twisted.internet import reactor, defer
 from twisted.python import log
@@ -23,7 +24,7 @@ class SpeedTest:
         self.download_times = {}
 
     def run(self):
-        print "STARTING"
+        print("STARTING")
         d = fireEventually()
         d.addCallback(lambda res: self.setUp())
         d.addCallback(lambda res: self.do_test())
@@ -31,7 +32,7 @@ class SpeedTest:
         def _err(err):
             self.failed = err
             log.err(err)
-            print err
+            print(err)
         d.addErrback(_err)
         def _done(res):
             reactor.stop()
@@ -39,8 +40,8 @@ class SpeedTest:
         d.addBoth(_done)
         reactor.run()
         if self.failed:
-            print "EXCEPTION"
-            print self.failed
+            print("EXCEPTION")
+            print(self.failed)
             sys.exit(1)
 
     def setUp(self):
@@ -51,7 +52,7 @@ class SpeedTest:
         d = self.tub.getReference(self.control_furl)
         def _gotref(rref):
             self.client_rref = rref
-            print "Got Client Control reference"
+            print("Got Client Control reference")
             return self.stall(5)
         d.addCallback(_gotref)
         return d
@@ -62,7 +63,7 @@ class SpeedTest:
         return d
 
     def record_times(self, times, key):
-        print "TIME (%s): %s up, %s down" % (key, times[0], times[1])
+        print("TIME (%s): %s up, %s down" % (key, times[0], times[1]))
         self.upload_times[key], self.download_times[key] = times
 
     def one_test(self, res, name, count, size, mutable):
@@ -84,15 +85,15 @@ class SpeedTest:
             self.total_rtt = sum(times)
             self.average_rtt = sum(times) / len(times)
             self.max_rtt = max(times)
-            print "num-peers: %d" % len(times)
-            print "total-RTT: %f" % self.total_rtt
-            print "average-RTT: %f" % self.average_rtt
-            print "max-RTT: %f" % self.max_rtt
+            print("num-peers: %d" % len(times))
+            print("total-RTT: %f" % self.total_rtt)
+            print("average-RTT: %f" % self.average_rtt)
+            print("max-RTT: %f" % self.max_rtt)
         d.addCallback(_got)
         return d
 
     def do_test(self):
-        print "doing test"
+        print("doing test")
         d = defer.succeed(None)
         d.addCallback(self.one_test, "startup", 1, 1000, False) #ignore this one
         d.addCallback(self.measure_rtt)
@@ -103,7 +104,7 @@ class SpeedTest:
             d.addCallback(self.one_test, "10x 200B", 10, 200, False)
             def _maybe_do_100x_200B(res):
                 if self.upload_times["10x 200B"] < 5:
-                    print "10x 200B test went too fast, doing 100x 200B test"
+                    print("10x 200B test went too fast, doing 100x 200B test")
                     return self.one_test(None, "100x 200B", 100, 200, False)
                 return
             d.addCallback(_maybe_do_100x_200B)
@@ -111,7 +112,7 @@ class SpeedTest:
             d.addCallback(self.one_test, "10MB", 1, 10*MB, False)
             def _maybe_do_100MB(res):
                 if self.upload_times["10MB"] > 30:
-                    print "10MB test took too long, skipping 100MB test"
+                    print("10MB test took too long, skipping 100MB test")
                     return
                 return self.one_test(None, "100MB", 1, 100*MB, False)
             d.addCallback(_maybe_do_100MB)
@@ -126,7 +127,7 @@ class SpeedTest:
             d.addCallback(self.one_test, "10x 200B SSK", 10, 200, "upload")
             def _maybe_do_100x_200B_SSK(res):
                 if self.upload_times["10x 200B SSK"] < 5:
-                    print "10x 200B SSK test went too fast, doing 100x 200B SSK"
+                    print("10x 200B SSK test went too fast, doing 100x 200B SSK")
                     return self.one_test(None, "100x 200B SSK", 100, 200,
                                          "upload")
                 return
@@ -146,37 +147,37 @@ class SpeedTest:
                 B = self.upload_times["100x 200B"] / 100
             else:
                 B = self.upload_times["10x 200B"] / 10
-            print "upload per-file time: %.3fs" % B
-            print "upload per-file times-avg-RTT: %f" % (B / self.average_rtt)
-            print "upload per-file times-total-RTT: %f" % (B / self.total_rtt)
+            print("upload per-file time: %.3fs" % B)
+            print("upload per-file times-avg-RTT: %f" % (B / self.average_rtt))
+            print("upload per-file times-total-RTT: %f" % (B / self.total_rtt))
             A1 = 1*MB / (self.upload_times["1MB"] - B) # in bytes per second
-            print "upload speed (1MB):", self.number(A1, "Bps")
+            print("upload speed (1MB):", self.number(A1, "Bps"))
             A2 = 10*MB / (self.upload_times["10MB"] - B)
-            print "upload speed (10MB):", self.number(A2, "Bps")
+            print("upload speed (10MB):", self.number(A2, "Bps"))
             if "100MB" in self.upload_times:
                 A3 = 100*MB / (self.upload_times["100MB"] - B)
-                print "upload speed (100MB):", self.number(A3, "Bps")
+                print("upload speed (100MB):", self.number(A3, "Bps"))
 
             # download
             if "100x 200B" in self.download_times:
                 B = self.download_times["100x 200B"] / 100
             else:
                 B = self.download_times["10x 200B"] / 10
-            print "download per-file time: %.3fs" % B
-            print "download per-file times-avg-RTT: %f" % (B / self.average_rtt)
-            print "download per-file times-total-RTT: %f" % (B / self.total_rtt)
+            print("download per-file time: %.3fs" % B)
+            print("download per-file times-avg-RTT: %f" % (B / self.average_rtt))
+            print("download per-file times-total-RTT: %f" % (B / self.total_rtt))
             A1 = 1*MB / (self.download_times["1MB"] - B) # in bytes per second
-            print "download speed (1MB):", self.number(A1, "Bps")
+            print("download speed (1MB):", self.number(A1, "Bps"))
             A2 = 10*MB / (self.download_times["10MB"] - B)
-            print "download speed (10MB):", self.number(A2, "Bps")
+            print("download speed (10MB):", self.number(A2, "Bps"))
             if "100MB" in self.download_times:
                 A3 = 100*MB / (self.download_times["100MB"] - B)
-                print "download speed (100MB):", self.number(A3, "Bps")
+                print("download speed (100MB):", self.number(A3, "Bps"))
 
         if self.DO_MUTABLE_CREATE:
             # SSK creation
             B = self.upload_times["10x 200B SSK creation"] / 10
-            print "create per-file time SSK: %.3fs" % B
+            print("create per-file time SSK: %.3fs" % B)
 
         if self.DO_MUTABLE:
             # upload SSK
@@ -184,19 +185,19 @@ class SpeedTest:
                 B = self.upload_times["100x 200B SSK"] / 100
             else:
                 B = self.upload_times["10x 200B SSK"] / 10
-            print "upload per-file time SSK: %.3fs" % B
+            print("upload per-file time SSK: %.3fs" % B)
             A1 = 1*MB / (self.upload_times["1MB SSK"] - B) # in bytes per second
-            print "upload speed SSK (1MB):", self.number(A1, "Bps")
+            print("upload speed SSK (1MB):", self.number(A1, "Bps"))
 
             # download SSK
             if "100x 200B SSK" in self.download_times:
                 B = self.download_times["100x 200B SSK"] / 100
             else:
                 B = self.download_times["10x 200B SSK"] / 10
-            print "download per-file time SSK: %.3fs" % B
+            print("download per-file time SSK: %.3fs" % B)
             A1 = 1*MB / (self.download_times["1MB SSK"] - B) # in bytes per
                                                              # second
-            print "download speed SSK (1MB):", self.number(A1, "Bps")
+            print("download speed SSK (1MB):", self.number(A1, "Bps"))
 
     def number(self, value, suffix=""):
         scaling = 1
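
For context on the arithmetic above: the per-file overhead B is estimated from a batch of tiny transfers, then subtracted from each bulk transfer time so that A = size / (elapsed - B) approximates raw throughput. A worked sketch with made-up timings (the numbers are illustrative, not measured):

MB = 1000000
upload_times = {"10x 200B": 4.0, "1MB": 2.5, "10MB": 12.0}   # fake data

B = upload_times["10x 200B"] / 10        # ~0.4s fixed cost per file
A1 = 1*MB / (upload_times["1MB"] - B)    # ~476,000 Bps
A2 = 10*MB / (upload_times["10MB"] - B)  # ~862,000 Bps
print("upload per-file time: %.3fs" % B)
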
index 82ccbf6483e0cbf81da4848f92cca8d6661fae02..0b8add10878e09f1e2ddda26a92e95ef7ca2a810 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os, random, struct
 from zope.interface import implements
 from twisted.internet import defer
@@ -114,7 +115,7 @@ class FakeCHKFileNode:
             return self.my_uri.get_size()
         try:
             data = self.all_contents[self.my_uri.to_string()]
-        except KeyError, le:
+        except KeyError as le:
             raise NotEnoughSharesError(le, 0, 3)
         return len(data)
     def get_current_size(self):
@@ -1073,7 +1074,7 @@ class WebErrorMixin:
         # this method as an errback handler, and it will reveal the hidden
         # message.
         f.trap(WebError)
-        print "Web Error:", f.value, ":", f.value.response
+        print("Web Error:", f.value, ":", f.value.response)
         return f
 
     def _shouldHTTPError(self, res, which, validator):
@@ -1110,7 +1111,7 @@ class WebErrorMixin:
 class ErrorMixin(WebErrorMixin):
     def explain_error(self, f):
         if f.check(defer.FirstError):
-            print "First Error:", f.value.subFailure
+            print("First Error:", f.value.subFailure)
         return f
 
 def corrupt_field(data, offset, size, debug=False):
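
The "except KeyError as le" change above is PEP 3110: the comma form "except KeyError, le" is a syntax error on Python 3, while "as" is accepted from Python 2.6 on. An illustrative sketch:

all_contents = {}   # hypothetical share cache, for illustration only
try:
    data = all_contents["missing-cap"]
except KeyError as le:              # was: except KeyError, le:
    print("no shares available: %r" % (le,))
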
index e51ab8b001c0da1a8874f1293ff55827b89ac70f..53508c48a5c87a0b6c177879bb0adaf90faae16b 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os, signal, sys, time
 from random import randrange
 
@@ -6,6 +7,7 @@ from twisted.python import failure
 
 from allmydata.util import fileutil, log
 from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding
+from allmydata.util.sixutil import map
 
 
 def insecurerandstr(n):
@@ -48,7 +50,7 @@ class NonASCIIPathMixin:
                     if os.path.exists(dirpath):
                         msg = ("We were unable to delete a non-ASCII directory %r created by the test. "
                                "This is liable to cause failures on future builds." % (dirpath,))
-                        print msg
+                        print(msg)
                         log.err(msg)
             self.addCleanup(_cleanup)
         os.mkdir(dirpath)
@@ -169,7 +171,7 @@ class TestMixin(SignalMixin):
             if p.active():
                 p.cancel()
             else:
-                print "WEIRDNESS! pending timed call not active!"
+                print("WEIRDNESS! pending timed call not active!")
         if required_to_quiesce and active:
             self.fail("Reactor was still active when it was required to be quiescent.")
 
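
allmydata.util.sixutil is a helper module introduced by this commit; its contents are not shown in this excerpt, but the imported map presumably restores Python 2's list-returning behaviour, since the Python 3 builtin returns a lazy iterator. A hypothetical sketch of such a helper:

try:
    import __builtin__ as builtins   # Python 2
except ImportError:
    import builtins                  # Python 3

def map(f, *iterables):
    # Eagerly materialize, so callers may index, re-iterate, or take len().
    return list(builtins.map(f, *iterables))

print(''.join(map(chr, [104, 105])))   # -> "hi"
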
index 7c9049192fd0a22ca9de1415d13b27f0139ae0b2..f5137819e3a68357d80a978856ff32a6f85817dd 100644 (file)
@@ -393,7 +393,8 @@ class GridTestMixin:
         for sharefile, data in shares.items():
             open(sharefile, "wb").write(data)
 
-    def delete_share(self, (shnum, serverid, sharefile)):
+    def delete_share(self, share_info):
+        (shnum, serverid, sharefile) = share_info
         os.unlink(sharefile)
 
     def delete_shares_numbered(self, uri, shnums):
@@ -401,7 +402,8 @@ class GridTestMixin:
             if i_shnum in shnums:
                 os.unlink(i_sharefile)
 
-    def corrupt_share(self, (shnum, serverid, sharefile), corruptor_function):
+    def corrupt_share(self, share_info, corruptor_function):
+        (shnum, serverid, sharefile) = share_info
         sharedata = open(sharefile, "rb").read()
         corruptdata = corruptor_function(sharedata)
         open(sharefile, "wb").write(corruptdata)
index 79a0dbcbbe32515022be9294e9e70be8a93f2c31..603fd38e0bc9b236b8eae16ea5710a94e526e23d 100644 (file)
@@ -1,6 +1,7 @@
 import random, unittest
 
 from allmydata.util import base62, mathutil
+from allmydata.util.sixutil import map
 
 def insecurerandstr(n):
     return ''.join(map(chr, map(random.randrange, [0]*n, [256]*n)))
@@ -12,7 +13,7 @@ class T(unittest.TestCase):
     def _test_ende(self, bs):
         ascii=base62.b2a(bs)
         bs2=base62.a2b(ascii)
-        assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), `bs2`, len(bs), `bs`, len(ascii), `ascii`)
+        assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), repr(bs2), len(bs), repr(bs), len(ascii), repr(ascii))
 
     def test_num_octets_that_encode_to_this_many_chars(self):
         return self._test_num_octets_that_encode_to_this_many_chars(2, 1)
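
The hunk above also retires backtick repr: `x` is Python 2-only shorthand for repr(x) and a syntax error on Python 3. Sketch:

bs = "\x00\xff"
# was: "bs: %s:%s" % (len(bs), `bs`)
print("bs: %s:%s" % (len(bs), repr(bs)))
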
index 65498e76ffce8a434972c269a0d79bbf9af709d8..1c3a6e767faf22890f99e7ef3b86f8ea70db7f69 100644 (file)
@@ -14,6 +14,7 @@ from allmydata.test.no_network import GridTestMixin
 from allmydata.immutable.upload import Data
 from allmydata.test.common_web import WebRenderingMixin
 from allmydata.mutable.publish import MutableData
+import six
 
 class FakeClient:
     def get_storage_broker(self):
@@ -357,7 +358,7 @@ class BalancingAct(GridTestMixin, unittest.TestCase):
             "This little printing function is only meant for < 26 servers"
         shares_chart = {}
         names = dict(zip([ss.my_nodeid
-                          for _,ss in self.g.servers_by_number.iteritems()],
+                          for _,ss in six.iteritems(self.g.servers_by_number)],
                          letters))
         for shnum, serverid, _ in self.find_uri_shares(uri):
             shares_chart.setdefault(shnum, []).append(names[serverid])
index 5e7d56ab4a47d66025a89fe0b5361b1992b48277..f4acbd360e8c383d858d8167552c16495f8c3ef8 100644 (file)
@@ -397,8 +397,8 @@ class CLI(CLITestMixin, unittest.TestCase):
                         "didn't see 'mqfblse6m5a6dh45isu2cg7oji' in '%s'" % err)
 
     def test_alias(self):
-        def s128(c): return base32.b2a(c*(128/8))
-        def s256(c): return base32.b2a(c*(256/8))
+        def s128(c): return base32.b2a(c*(128//8))
+        def s256(c): return base32.b2a(c*(256//8))
         TA = "URI:DIR2:%s:%s" % (s128("T"), s256("T"))
         WA = "URI:DIR2:%s:%s" % (s128("W"), s256("W"))
         CA = "URI:DIR2:%s:%s" % (s128("C"), s256("C"))
@@ -516,8 +516,8 @@ class CLI(CLITestMixin, unittest.TestCase):
         self.failUnlessRaises(common.UnknownAliasError, ga5, u"C:\\Windows")
 
     def test_alias_tolerance(self):
-        def s128(c): return base32.b2a(c*(128/8))
-        def s256(c): return base32.b2a(c*(256/8))
+        def s128(c): return base32.b2a(c*(128//8))
+        def s256(c): return base32.b2a(c*(256//8))
         TA = "URI:DIR2:%s:%s" % (s128("T"), s256("T"))
         aliases = {"present": TA,
                    "future": "URI-FROM-FUTURE:ooh:aah"}
@@ -706,7 +706,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
         aliasfile = os.path.join(self.get_clientdir(), "private", "aliases")
 
         d = self.do_cli("create-alias", "tahoe")
-        def _done((rc,stdout,stderr)):
+        def _done(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failUnless("Alias 'tahoe' created" in stdout)
             self.failIf(stderr)
             aliases = get_aliases(self.get_clientdir())
@@ -728,7 +729,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_urls)
 
         d.addCallback(lambda res: self.do_cli("create-alias", "two")) # dup
-        def _check_create_duplicate((rc,stdout,stderr)):
+        def _check_create_duplicate(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failIfEqual(rc, 0)
             self.failUnless("Alias 'two' already exists!" in stderr)
             aliases = get_aliases(self.get_clientdir())
@@ -736,14 +738,16 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check_create_duplicate)
 
         d.addCallback(lambda res: self.do_cli("add-alias", "added", self.two_uri))
-        def _check_add((rc,stdout,stderr)):
+        def _check_add(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failUnlessReallyEqual(rc, 0)
             self.failUnless("Alias 'added' added" in stdout)
         d.addCallback(_check_add)
 
         # check add-alias with a duplicate
         d.addCallback(lambda res: self.do_cli("add-alias", "two", self.two_uri))
-        def _check_add_duplicate((rc,stdout,stderr)):
+        def _check_add_duplicate(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failIfEqual(rc, 0)
             self.failUnless("Alias 'two' already exists!" in stderr)
             aliases = get_aliases(self.get_clientdir())
@@ -751,7 +755,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check_add_duplicate)
 
         # check create-alias and add-alias with invalid aliases
-        def _check_invalid((rc,stdout,stderr)):
+        def _check_invalid(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failIfEqual(rc, 0)
             self.failUnlessIn("cannot contain", stderr)
 
@@ -790,7 +795,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
             fileutil.write(aliasfile, old.rstrip())
             return self.do_cli("create-alias", "un-corrupted1")
         d.addCallback(_remove_trailing_newline_and_create_alias)
-        def _check_not_corrupted1((rc,stdout,stderr)):
+        def _check_not_corrupted1(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failUnless("Alias 'un-corrupted1' created" in stdout, stdout)
             self.failIf(stderr)
             # the old behavior was to simply append the new record, causing a
@@ -813,7 +819,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
             fileutil.write(aliasfile, old.rstrip())
             return self.do_cli("add-alias", "un-corrupted2", self.two_uri)
         d.addCallback(_remove_trailing_newline_and_add_alias)
-        def _check_not_corrupted((rc,stdout,stderr)):
+        def _check_not_corrupted(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failUnless("Alias 'un-corrupted2' added" in stdout, stdout)
             self.failIf(stderr)
             aliases = get_aliases(self.get_clientdir())
@@ -835,7 +842,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
             raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")
 
         d = self.do_cli("create-alias", etudes_arg)
-        def _check_create_unicode((rc, out, err)):
+        def _check_create_unicode(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessIn("Alias %s created" % quote_output(u"\u00E9tudes"), out)
@@ -845,7 +853,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check_create_unicode)
 
         d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":"))
-        def _check_ls1((rc, out, err)):
+        def _check_ls1(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(out, "")
@@ -855,14 +864,16 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
                                               stdin="Blah blah blah"))
 
         d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":"))
-        def _check_ls2((rc, out, err)):
+        def _check_ls2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(out, "uploaded.txt\n")
         d.addCallback(_check_ls2)
 
         d.addCallback(lambda res: self.do_cli("get", etudes_arg + ":uploaded.txt"))
-        def _check_get((rc, out, err)):
+        def _check_get(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(out, "Blah blah blah")
@@ -874,7 +885,8 @@ class CreateAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda res: self.do_cli("get",
                                               get_aliases(self.get_clientdir())[u"\u00E9tudes"] + "/" + lumiere_arg))
-        def _check_get2((rc, out, err)):
+        def _check_get2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(out, "Let the sunshine In!")
@@ -899,7 +911,8 @@ class Ln(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Ln/ln_without_alias"
         self.set_up_grid()
         d = self.do_cli("ln", "from", "to")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -919,7 +932,8 @@ class Ln(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Ln/ln_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("ln", "havasu:from", "havasu:to")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
         d.addCallback(_check)
@@ -957,8 +971,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessReallyEqual(out, DATA)
         d.addCallback(_downloaded)
         d.addCallback(lambda res: self.do_cli("put", "-", stdin=DATA))
-        d.addCallback(lambda (rc, out, err):
-                      self.failUnlessReallyEqual(out, self.readcap))
+        d.addCallback(lambda rc_out_err:
+                      self.failUnlessReallyEqual(rc_out_err[1], self.readcap))
         return d
 
     def test_unlinked_immutable_from_file(self):
@@ -974,17 +988,18 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         # we make the file small enough to fit in a LIT file, for speed
         fileutil.write(rel_fn, "short file")
         d = self.do_cli("put", rel_fn)
-        def _uploaded((rc, out, err)):
+        def _uploaded(rc_out_err):
+            (rc, out, err) = rc_out_err
             readcap = out
             self.failUnless(readcap.startswith("URI:LIT:"), readcap)
             self.readcap = readcap
         d.addCallback(_uploaded)
         d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, self.readcap))
+        d.addCallback(lambda rc_stdout_stderr:
+                      self.failUnlessReallyEqual(rc_stdout_stderr[1], self.readcap))
         d.addCallback(lambda res: self.do_cli("put", abs_fn))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, self.readcap))
+        d.addCallback(lambda rc_stdout_stderr1:
+                      self.failUnlessReallyEqual(rc_stdout_stderr1[1], self.readcap))
         # we just have to assume that ~ is handled properly
         return d
 
@@ -1009,7 +1024,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda res:
                       self.do_cli("put", rel_fn, "uploaded.txt"))
-        def _uploaded((rc, out, err)):
+        def _uploaded(rc_out_err):
+            (rc, out, err) = rc_out_err
             readcap = out.strip()
             self.failUnless(readcap.startswith("URI:LIT:"), readcap)
             self.failUnlessIn("201 Created", err)
@@ -1017,12 +1033,13 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_uploaded)
         d.addCallback(lambda res:
                       self.do_cli("get", "tahoe:uploaded.txt"))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, DATA))
+        d.addCallback(lambda rc_stdout_stderr2:
+                      self.failUnlessReallyEqual(rc_stdout_stderr2[1], DATA))
 
         d.addCallback(lambda res:
                       self.do_cli("put", "-", "uploaded.txt", stdin=DATA2))
-        def _replaced((rc, out, err)):
+        def _replaced(rc_out_err):
+            (rc, out, err) = rc_out_err
             readcap = out.strip()
             self.failUnless(readcap.startswith("URI:LIT:"), readcap)
             self.failUnlessIn("200 OK", err)
@@ -1031,21 +1048,21 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda res:
                       self.do_cli("put", rel_fn, "subdir/uploaded2.txt"))
         d.addCallback(lambda res: self.do_cli("get", "subdir/uploaded2.txt"))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, DATA))
+        d.addCallback(lambda rc_stdout_stderr3:
+                      self.failUnlessReallyEqual(rc_stdout_stderr3[1], DATA))
 
         d.addCallback(lambda res:
                       self.do_cli("put", rel_fn, "tahoe:uploaded3.txt"))
         d.addCallback(lambda res: self.do_cli("get", "tahoe:uploaded3.txt"))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, DATA))
+        d.addCallback(lambda rc_stdout_stderr4:
+                      self.failUnlessReallyEqual(rc_stdout_stderr4[1], DATA))
 
         d.addCallback(lambda res:
                       self.do_cli("put", rel_fn, "tahoe:subdir/uploaded4.txt"))
         d.addCallback(lambda res:
                       self.do_cli("get", "tahoe:subdir/uploaded4.txt"))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, DATA))
+        d.addCallback(lambda rc_stdout_stderr5:
+                      self.failUnlessReallyEqual(rc_stdout_stderr5[1], DATA))
 
         def _get_dircap(res):
             self.dircap = get_aliases(self.get_clientdir())["tahoe"]
@@ -1056,16 +1073,16 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
                                   self.dircap+":./uploaded5.txt"))
         d.addCallback(lambda res:
                       self.do_cli("get", "tahoe:uploaded5.txt"))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, DATA))
+        d.addCallback(lambda rc_stdout_stderr6:
+                      self.failUnlessReallyEqual(rc_stdout_stderr6[1], DATA))
 
         d.addCallback(lambda res:
                       self.do_cli("put", rel_fn,
                                   self.dircap+":./subdir/uploaded6.txt"))
         d.addCallback(lambda res:
                       self.do_cli("get", "tahoe:subdir/uploaded6.txt"))
-        d.addCallback(lambda (rc,stdout,stderr):
-                      self.failUnlessReallyEqual(stdout, DATA))
+        d.addCallback(lambda rc_stdout_stderr7:
+                      self.failUnlessReallyEqual(rc_stdout_stderr7[1], DATA))
 
         return d
 
@@ -1093,7 +1110,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnless(self.filecap.startswith("URI:SSK:"), self.filecap)
         d.addCallback(_created)
         d.addCallback(lambda res: self.do_cli("get", self.filecap))
-        d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA))
+        d.addCallback(lambda rc_out_err8: self.failUnlessReallyEqual(rc_out_err8[1], DATA))
 
         d.addCallback(lambda res: self.do_cli("put", "-", self.filecap, stdin=DATA2))
         def _replaced(res):
@@ -1103,7 +1120,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessReallyEqual(self.filecap, out)
         d.addCallback(_replaced)
         d.addCallback(lambda res: self.do_cli("get", self.filecap))
-        d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA2))
+        d.addCallback(lambda rc_out_err9: self.failUnlessReallyEqual(rc_out_err9[1], DATA2))
 
         d.addCallback(lambda res: self.do_cli("put", rel_fn, self.filecap))
         def _replaced2(res):
@@ -1112,7 +1129,7 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.failUnlessReallyEqual(self.filecap, out)
         d.addCallback(_replaced2)
         d.addCallback(lambda res: self.do_cli("get", self.filecap))
-        d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA3))
+        d.addCallback(lambda rc_out_err10: self.failUnlessReallyEqual(rc_out_err10[1], DATA3))
 
         return d
 
@@ -1150,10 +1167,11 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check2)
         d.addCallback(lambda res:
                       self.do_cli("get", "tahoe:uploaded.txt"))
-        d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA2))
+        d.addCallback(lambda rc_out_err11: self.failUnlessReallyEqual(rc_out_err11[1], DATA2))
         return d
 
-    def _check_mdmf_json(self, (rc, json, err)):
+    def _check_mdmf_json(self, rc_json_err):
+         (rc, json, err) = rc_json_err
          self.failUnlessEqual(rc, 0)
          self.failUnlessEqual(err, "")
          self.failUnlessIn('"format": "MDMF"', json)
@@ -1162,7 +1180,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
          self.failUnlessIn("URI:MDMF-RO", json)
          self.failUnlessIn("URI:MDMF-Verifier", json)
 
-    def _check_sdmf_json(self, (rc, json, err)):
+    def _check_sdmf_json(self, rc_json_err):
+        (rc, json, err) = rc_json_err
         self.failUnlessEqual(rc, 0)
         self.failUnlessEqual(err, "")
         self.failUnlessIn('"format": "SDMF"', json)
@@ -1171,7 +1190,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.failUnlessIn("URI:SSK-RO", json)
         self.failUnlessIn("URI:SSK-Verifier", json)
 
-    def _check_chk_json(self, (rc, json, err)):
+    def _check_chk_json(self, rc_json_err):
+        (rc, json, err) = rc_json_err
         self.failUnlessEqual(rc, 0)
         self.failUnlessEqual(err, "")
         self.failUnlessIn('"format": "CHK"', json)
@@ -1194,7 +1214,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
                 # unlinked
                 args = ["put"] + cmdargs + [fn1]
             d2 = self.do_cli(*args)
-            def _list((rc, out, err)):
+            def _list(rc_out_err):
+                (rc, out, err) = rc_out_err
                 self.failUnlessEqual(rc, 0) # don't allow failure
                 if filename:
                     return self.do_cli("ls", "--json", filename)
@@ -1253,7 +1274,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         fn1 = os.path.join(self.basedir, "data")
         fileutil.write(fn1, data)
         d = self.do_cli("put", "--format=MDMF", fn1)
-        def _got_cap((rc, out, err)):
+        def _got_cap(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.cap = out.strip()
         d.addCallback(_got_cap)
@@ -1263,14 +1285,16 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         fileutil.write(fn2, data2)
         d.addCallback(lambda ignored:
             self.do_cli("put", fn2, self.cap))
-        def _got_put((rc, out, err)):
+        def _got_put(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessIn(self.cap, out)
         d.addCallback(_got_put)
         # Now get the cap. We should see the data we just put there.
         d.addCallback(lambda ignored:
             self.do_cli("get", self.cap))
-        def _got_data((rc, out, err)):
+        def _got_data(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessEqual(out, data2)
         d.addCallback(_got_data)
@@ -1286,7 +1310,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.do_cli("put", fn3, self.cap))
         d.addCallback(lambda ignored:
             self.do_cli("get", self.cap))
-        def _got_data3((rc, out, err)):
+        def _got_data3(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessEqual(out, data3)
         d.addCallback(_got_data3)
@@ -1299,7 +1324,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         fn1 = os.path.join(self.basedir, "data")
         fileutil.write(fn1, data)
         d = self.do_cli("put", "--format=SDMF", fn1)
-        def _got_cap((rc, out, err)):
+        def _got_cap(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.cap = out.strip()
         d.addCallback(_got_cap)
@@ -1309,14 +1335,16 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         fileutil.write(fn2, data2)
         d.addCallback(lambda ignored:
             self.do_cli("put", fn2, self.cap))
-        def _got_put((rc, out, err)):
+        def _got_put(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessIn(self.cap, out)
         d.addCallback(_got_put)
         # Now get the cap. We should see the data we just put there.
         d.addCallback(lambda ignored:
             self.do_cli("get", self.cap))
-        def _got_data((rc, out, err)):
+        def _got_data(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessEqual(out, data2)
         d.addCallback(_got_data)
@@ -1334,7 +1362,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Put/put_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("put", "somefile", "fake:afile")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -1363,7 +1392,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda res:
                       self.do_cli("put", rel_fn.encode(get_io_encoding()), a_trier_arg))
-        def _uploaded((rc, out, err)):
+        def _uploaded(rc_out_err):
+            (rc, out, err) = rc_out_err
             readcap = out.strip()
             self.failUnless(readcap.startswith("URI:LIT:"), readcap)
             self.failUnlessIn("201 Created", err)
@@ -1372,8 +1402,8 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda res:
                       self.do_cli("get", "tahoe:" + a_trier_arg))
-        d.addCallback(lambda (rc, out, err):
-                      self.failUnlessReallyEqual(out, DATA))
+        d.addCallback(lambda rc_out_err12:
+                      self.failUnlessReallyEqual(rc_out_err12[1], DATA))
 
         return d
 
@@ -1392,7 +1422,8 @@ class Admin(unittest.TestCase):
 
     def test_generate_keypair(self):
         d = self.do_cli("admin", "generate-keypair")
-        def _done( (stdout, stderr) ):
+        def _done(stdout_stderr):
+            (stdout, stderr) = stdout_stderr
             lines = [line.strip() for line in stdout.splitlines()]
             privkey_bits = lines[0].split()
             pubkey_bits = lines[1].split()
@@ -1412,7 +1443,8 @@ class Admin(unittest.TestCase):
     def test_derive_pubkey(self):
         priv1,pub1 = keyutil.make_keypair()
         d = self.do_cli("admin", "derive-pubkey", priv1)
-        def _done( (stdout, stderr) ):
+        def _done(stdout_stderr):
+            (stdout, stderr) = stdout_stderr
             lines = stdout.split("\n")
             privkey_line = lines[0].strip()
             pubkey_line = lines[1].strip()
@@ -1456,14 +1488,15 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_goodcap)
         d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"1share"))
         d.addCallback(lambda n:
-                      self.delete_shares_numbered(n.get_uri(), range(1,10)))
+                      self.delete_shares_numbered(n.get_uri(), list(range(1,10))))
         d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"0share"))
         d.addCallback(lambda n:
-                      self.delete_shares_numbered(n.get_uri(), range(0,10)))
+                      self.delete_shares_numbered(n.get_uri(), list(range(0,10))))
         d.addCallback(lambda ign:
                       self.do_cli("add-alias", "tahoe", self.rooturi))
         d.addCallback(lambda ign: self.do_cli("ls"))
-        def _check1((rc,out,err)):
+        def _check1(rc_out_err):
+            (rc,out,err) = rc_out_err
             if good_out is None:
                 self.failUnlessReallyEqual(rc, 1)
                 self.failUnlessIn("files whose names could not be converted", err)
@@ -1475,13 +1508,15 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
                 self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", good_out]))
         d.addCallback(_check1)
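
The list(range(1,10)) wrappers above reflect that Python 3's range() is a lazy sequence rather than a list; 2to3 inserts list() conservatively wherever it cannot prove the result is only iterated. Here a membership test would also work on a bare range, so the wrapper simply preserves Python 2 semantics exactly:

shnums = list(range(1, 10))       # a real list on both interpreters
print(9 in shnums, 0 in shnums)   # -> True False
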
         d.addCallback(lambda ign: self.do_cli("ls", "missing"))
-        def _check2((rc,out,err)):
+        def _check2(rc_out_err):
+            (rc,out,err) = rc_out_err
             self.failIfEqual(rc, 0)
             self.failUnlessReallyEqual(err.strip(), "No such file or directory")
             self.failUnlessReallyEqual(out, "")
         d.addCallback(_check2)
         d.addCallback(lambda ign: self.do_cli("ls", "1share"))
-        def _check3((rc,out,err)):
+        def _check3(rc_out_err):
+            (rc,out,err) = rc_out_err
             self.failIfEqual(rc, 0)
             self.failUnlessIn("Error during GET: 410 Gone", err)
             self.failUnlessIn("UnrecoverableFileError:", err)
@@ -1491,7 +1526,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check3)
         d.addCallback(lambda ign: self.do_cli("ls", "0share"))
         d.addCallback(_check3)
-        def _check4((rc, out, err)):
+        def _check4(rc_out_err):
+            (rc, out, err) = rc_out_err
             if good_out is None:
                 self.failUnlessReallyEqual(rc, 1)
                 self.failUnlessIn("files whose names could not be converted", err)
@@ -1515,9 +1551,10 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + ":./" + good_arg))
             d.addCallback(_check4)
 
-        def _check5((rc, out, err)):
+        def _check5(rc_out_err):
             # listing a raw filecap should not explode, but it will have no
             # metadata, just the size
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual("-r-- %d -" % len(small), out.strip())
         d.addCallback(lambda ign: self.do_cli("ls", "-l", self.goodcap))
@@ -1528,14 +1565,16 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.rootnode.move_child_to(u"g\u00F6\u00F6d", self.rootnode, u"good"))
 
         d.addCallback(lambda ign: self.do_cli("ls"))
-        def _check1_ascii((rc,out,err)):
+        def _check1_ascii(rc_out_err):
+            (rc,out,err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", "good"]))
         d.addCallback(_check1_ascii)
-        def _check4_ascii((rc, out, err)):
+        def _check4_ascii(rc_out_err):
             # listing a file (as dir/filename) should have the edge metadata,
             # including the filename
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessIn("good", out)
             self.failIfIn("-r-- %d -" % len(small), out,
@@ -1557,19 +1596,21 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             return self.rootnode.create_subdirectory(u"unknown", initial_children=kids,
                                                      mutable=False)
         d.addCallback(_create_unknown)
-        def _check6((rc, out, err)):
+        def _check6(rc_out_err):
             # listing a directory referencing an unknown object should print
             # an extra message to stderr
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessIn("?r-- ? - unknownchild-imm\n", out)
             self.failUnlessIn("included unknown objects", err)
         d.addCallback(lambda ign: self.do_cli("ls", "-l", "unknown"))
         d.addCallback(_check6)
-        def _check7((rc, out, err)):
+        def _check7(rc_out_err):
             # listing an unknown cap directly should print an extra message
             # to stderr (currently this only works if the URI starts with 'URI:'
             # after any 'ro.' or 'imm.' prefix, otherwise it will be confused
             # with an alias).
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessIn("?r-- ? -\n", out)
             self.failUnlessIn("included unknown objects", err)
@@ -1583,7 +1624,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/List/list_without_alias"
         self.set_up_grid()
         d = self.do_cli("ls")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -1596,7 +1638,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/List/list_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("ls", "nonexistent:")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
@@ -1633,7 +1676,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
             d3 = n.add_file(u"immutable", immutable_data)
             ds = [d1, d2, d3]
             dl = defer.DeferredList(ds)
-            def _made_files((r1, r2, r3)):
+            def _made_files(results):
+                (r1, r2, r3) = results
                 self.failUnless(r1[0])
                 self.failUnless(r2[0])
                 self.failUnless(r3[0])
@@ -1665,7 +1709,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         d = self._create_directory_structure()
         d.addCallback(lambda ignored:
             self.do_cli("ls", self._dircap))
-        def _got_ls((rc, out, err)):
+        def _got_ls(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessEqual(err, "")
             self.failUnlessIn("immutable", out)
@@ -1682,7 +1727,8 @@ class List(GridTestMixin, CLITestMixin, unittest.TestCase):
         d = self._create_directory_structure()
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", self._dircap))
-        def _got_json((rc, out, err)):
+        def _got_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessEqual(err, "")
             self.failUnlessIn(self._mdmf_uri, out)
@@ -1717,16 +1763,16 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         # (we should be able to rename files)
         d.addCallback(lambda res:
             self.do_cli("mv", "tahoe:file1", "tahoe:file3"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessIn("OK", out, "mv didn't rename a file"))
+        d.addCallback(lambda rc_out_err13:
+            self.failUnlessIn("OK", rc_out_err13[1], "mv didn't rename a file"))
 
         # do mv file3 file2
         # (This should succeed without issue)
         d.addCallback(lambda res:
             self.do_cli("mv", "tahoe:file3", "tahoe:file2"))
         # Out should contain "OK" to show that the transfer worked.
-        d.addCallback(lambda (rc,out,err):
-            self.failUnlessIn("OK", out, "mv didn't output OK after mving"))
+        d.addCallback(lambda rc_out_err14:
+            self.failUnlessIn("OK", rc_out_err14[1], "mv didn't output OK after mving"))
 
         # Next, make a remote directory.
         d.addCallback(lambda res:
@@ -1737,9 +1783,9 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         #  client should support this)
         d.addCallback(lambda res:
             self.do_cli("mv", "tahoe:file2", "tahoe:directory"))
-        d.addCallback(lambda (rc, out, err):
+        d.addCallback(lambda rc_out_err15:
             self.failUnlessIn(
-                "Error: You can't overwrite a directory with a file", err,
+                "Error: You can't overwrite a directory with a file", rc_out_err15[2],
                 "mv shouldn't overwrite directories" ))
 
         # mv file2 directory/
@@ -1747,20 +1793,20 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda res:
             self.do_cli("mv", "tahoe:file2", "tahoe:directory/"))
         # We should see an "OK"...
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessIn("OK", out,
+        d.addCallback(lambda rc_out_err16:
+            self.failUnlessIn("OK", rc_out_err16[1],
                             "mv didn't mv a file into a directory"))
         # ... and be able to GET the file
         d.addCallback(lambda res:
             self.do_cli("get", "tahoe:directory/file2", self.basedir + "new"))
-        d.addCallback(lambda (rc, out, err):
+        d.addCallback(lambda rc_out_err17:
             self.failUnless(os.path.exists(self.basedir + "new"),
                             "mv didn't write the destination file"))
         # ... and not find the file where it was before.
         d.addCallback(lambda res:
             self.do_cli("get", "tahoe:file2", "file2"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessIn("404", err,
+        d.addCallback(lambda rc_out_err18:
+            self.failUnlessIn("404", rc_out_err18[2],
                             "mv left the source file intact"))
 
         # Let's build:
@@ -1781,13 +1827,13 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         # We should have just some_file in tahoe:directory3
         d.addCallback(lambda res:
             self.do_cli("get", "tahoe:directory3/some_file", "some_file"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnless("404" not in err,
+        d.addCallback(lambda rc_out_err19:
+            self.failUnless("404" not in rc_out_err19[2],
                               "mv didn't handle nested directories correctly"))
         d.addCallback(lambda res:
             self.do_cli("get", "tahoe:directory3/directory", "directory"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessIn("404", err,
+        d.addCallback(lambda rc_out_err20:
+            self.failUnlessIn("404", rc_out_err20[2],
                               "mv moved the wrong thing"))
         return d
 
@@ -1820,7 +1866,8 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         # do mv file1 file2
         d.addCallback(lambda res:
             self.do_cli("mv", "tahoe:file1", "tahoe:file2"))
-        def _check( (rc, out, err) ):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failIfIn("OK", out, "mv printed 'OK' even though the DELETE failed")
             self.failUnlessEqual(rc, 2)
         d.addCallback(_check)
@@ -1838,7 +1885,8 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Mv/mv_without_alias"
         self.set_up_grid()
         d = self.do_cli("mv", "afile", "anotherfile")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -1864,7 +1912,8 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Mv/mv_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("mv", "fake:afile", "fake:anotherfile")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("fake", err)
@@ -1919,15 +1968,16 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:"))
 
         d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
-        d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA1))
+        d.addCallback(lambda rc_out_err21: self.failUnlessReallyEqual(rc_out_err21[1], DATA1))
 
         d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
 
         d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
-        d.addCallback(lambda (rc,out,err): self.failUnlessReallyEqual(out, DATA2))
+        d.addCallback(lambda rc_out_err22: self.failUnlessReallyEqual(rc_out_err22[1], DATA2))
 
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:"))
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             try:
                 unicode_to_output(u"\u00C4rtonwall")
             except UnicodeEncodeError:
@@ -1974,7 +2024,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda ign: self.do_cli("put", fn1))
-        def _put_file((rc, out, err)):
+        def _put_file(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessIn("200 OK", err)
             # keep track of the filecap
@@ -1984,7 +2035,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # Let's try copying this to the disk using the filecap
         #  cp FILECAP filename
         d.addCallback(lambda ign: self.do_cli("cp", self.filecap, fn2))
-        def _copy_file((rc, out, err)):
+        def _copy_file(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             results = fileutil.read(fn2)
             self.failUnlessReallyEqual(results, DATA1)
@@ -1993,7 +2045,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # Test with ./ (see #761)
         #  cp FILECAP localdir
         d.addCallback(lambda ign: self.do_cli("cp", self.filecap, outdir))
-        def _resp((rc, out, err)):
+        def _resp(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error: you must specify a destination filename",
                               err)
@@ -2002,7 +2055,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         # Create a directory, linked at tahoe:test
         d.addCallback(lambda ign: self.do_cli("mkdir", "tahoe:test"))
-        def _get_dir((rc, out, err)):
+        def _get_dir(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.dircap = out.strip()
         d.addCallback(_get_dir)
@@ -2010,12 +2064,13 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # Upload a file to the directory
         d.addCallback(lambda ign:
                       self.do_cli("put", fn1, "tahoe:test/test_file"))
-        d.addCallback(lambda (rc, out, err): self.failUnlessReallyEqual(rc, 0))
+        d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[0], 0))
 
         #  cp DIRCAP/filename localdir
         d.addCallback(lambda ign:
                       self.do_cli("cp",  self.dircap + "/test_file", outdir))
-        def _get_resp((rc, out, err)):
+        def _get_resp(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             results = fileutil.read(os.path.join(outdir, "test_file"))
             self.failUnlessReallyEqual(results, DATA1)
@@ -2024,7 +2079,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         #  cp -r DIRCAP/filename filename2
         d.addCallback(lambda ign:
                       self.do_cli("cp",  self.dircap + "/test_file", fn3))
-        def _get_resp2((rc, out, err)):
+        def _get_resp2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             results = fileutil.read(fn3)
             self.failUnlessReallyEqual(results, DATA1)
@@ -2032,7 +2088,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         #  cp --verbose filename3 dircap:test_file
         d.addCallback(lambda ign:
                       self.do_cli("cp", "--verbose", '--recursive', self.basedir, self.dircap))
-        def _test_for_wrong_indices((rc, out, err)):
+        def _test_for_wrong_indices(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnless('examining 1 of 1\n' in err)
         d.addCallback(_test_for_wrong_indices)
         return d
@@ -2043,7 +2100,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Cp/cp_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("cp", "fake:file1", "fake:file2")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
         d.addCallback(_check)
@@ -2075,7 +2133,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda res: self.do_cli("mkdir", "tahoe:test/" + artonwall_arg))
         d.addCallback(lambda res: self.do_cli("cp", "-r", "tahoe:test", "tahoe:test2"))
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:test2"))
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             try:
                 unicode_to_output(u"\u00C4rtonwall")
             except UnicodeEncodeError:
@@ -2111,7 +2170,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.do_cli("put", "--mutable", test_txt_path, "tahoe:test/test.txt"))
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test/test.txt"))
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             self.failUnlessEqual(out, test_txt_contents)
         d.addCallback(_check)
@@ -2120,7 +2180,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # file we've just uploaded.
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test/test.txt"))
-        def _get_test_txt_uris((rc, out, err)):
+        def _get_test_txt_uris(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             filetype, data = simplejson.loads(out)
 
@@ -2147,13 +2208,14 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # If we get test.txt now, we should see the new data.
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test/test.txt"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, new_txt_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], new_txt_contents))
         # If we get the json of the new file, we should see that the old
         # uri is there
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test/test.txt"))
-        def _check_json((rc, out, err)):
+        def _check_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             filetype, data = simplejson.loads(out)
 
@@ -2170,8 +2232,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # should give us the new contents.
         d.addCallback(lambda ignored:
             self.do_cli("get", self.rw_uri))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, new_txt_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], new_txt_contents))
         # Now copy the old test.txt without an explicit destination
         # file. tahoe cp will match it to the existing file and
         # overwrite it appropriately.
@@ -2179,15 +2241,15 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.do_cli("cp", test_txt_path, "tahoe:test"))
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test/test.txt"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, test_txt_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], test_txt_contents))
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test/test.txt"))
         d.addCallback(_check_json)
         d.addCallback(lambda ignored:
             self.do_cli("get", self.rw_uri))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, test_txt_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], test_txt_contents))
 
         # Now we'll make a more complicated directory structure.
         # test2/
@@ -2212,7 +2274,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.do_cli("put", imm_test_txt_path, "tahoe:test2/imm2"))
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test2"))
-        def _process_directory_json((rc, out, err)):
+        def _process_directory_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
 
             filetype, data = simplejson.loads(out)
@@ -2255,7 +2318,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         # We expect that mutable1 and mutable2 are overwritten in-place,
         # so they'll retain their URIs but have different content.
-        def _process_file_json((rc, out, err), fn):
+        def _process_file_json(rc_out_err, fn):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             filetype, data = simplejson.loads(out)
             self.failUnlessEqual(filetype, "filenode")
@@ -2272,8 +2336,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         for fn in ("mutable1", "mutable2"):
             d.addCallback(lambda ignored, fn=fn:
                 self.do_cli("get", "tahoe:test2/%s" % fn))
-            d.addCallback(lambda (rc, out, err), fn=fn:
-                self.failUnlessEqual(out, fn * 1000))
+            d.addCallback(lambda rc_and_out_and_err, fn=fn:
+                self.failUnlessEqual(rc_and_out_and_err[1], fn * 1000))
             d.addCallback(lambda ignored, fn=fn:
                 self.do_cli("ls", "--json", "tahoe:test2/%s" % fn))
             d.addCallback(_process_file_json, fn=fn)
@@ -2282,8 +2346,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # should be different.
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test2/imm1"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, "imm1" * 1000))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], "imm1" * 1000))
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test2/imm1"))
         d.addCallback(_process_file_json, fn="imm1")
@@ -2291,18 +2355,19 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # imm3 should have been created.
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test2/imm3"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, "imm3" * 1000))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], "imm3" * 1000))
 
         # imm2 should be exactly as we left it, since our newly-copied
         # directory didn't contain an imm2 entry.
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test2/imm2"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, imm_test_txt_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], imm_test_txt_contents))
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test2/imm2"))
-        def _process_imm2_json((rc, out, err)):
+        def _process_imm2_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             filetype, data = simplejson.loads(out)
             self.failUnlessEqual(filetype, "filenode")
@@ -2333,14 +2398,16 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         d = self.do_cli("create-alias", "tahoe:")
         d.addCallback(lambda ignored:
             self.do_cli("put", "--mutable", test_file_path))
-        def _get_test_uri((rc, out, err)):
+        def _get_test_uri(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             # this should be a write uri
             self._test_write_uri = out
         d.addCallback(_get_test_uri)
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", self._test_write_uri))
-        def _process_test_json((rc, out, err)):
+        def _process_test_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
             filetype, data = simplejson.loads(out)
 
@@ -2352,12 +2419,13 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # Now we'll link the readonly URI into the tahoe: alias.
         d.addCallback(lambda ignored:
             self.do_cli("ln", self._test_read_uri, "tahoe:test_file.txt"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(rc, 0))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[0], 0))
         # Let's grab the json of that to make sure that we did it right.
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:"))
-        def _process_tahoe_json((rc, out, err)):
+        def _process_tahoe_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
 
             filetype, data = simplejson.loads(out)
@@ -2377,40 +2445,41 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         # place of that one. We should get an error.
         d.addCallback(lambda ignored:
             self.do_cli("cp", replacement_file_path, "tahoe:test_file.txt"))
-        def _check_error_message((rc, out, err)):
+        def _check_error_message(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 1)
             self.failUnlessIn("replace or update requested with read-only cap", err)
         d.addCallback(_check_error_message)
         # Make extra sure that that didn't work.
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test_file.txt"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, test_file_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], test_file_contents))
         d.addCallback(lambda ignored:
             self.do_cli("get", self._test_read_uri))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, test_file_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], test_file_contents))
         # Now we'll do it without an explicit destination.
         d.addCallback(lambda ignored:
             self.do_cli("cp", test_file_path, "tahoe:"))
         d.addCallback(_check_error_message)
         d.addCallback(lambda ignored:
             self.do_cli("get", "tahoe:test_file.txt"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, test_file_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], test_file_contents))
         d.addCallback(lambda ignored:
             self.do_cli("get", self._test_read_uri))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(out, test_file_contents))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[1], test_file_contents))
         # Now we'll link a readonly file into a subdirectory.
         d.addCallback(lambda ignored:
             self.do_cli("mkdir", "tahoe:testdir"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(rc, 0))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[0], 0))
         d.addCallback(lambda ignored:
             self.do_cli("ln", self._test_read_uri, "tahoe:test/file2.txt"))
-        d.addCallback(lambda (rc, out, err):
-            self.failUnlessEqual(rc, 0))
+        d.addCallback(lambda rc_out_err:
+            self.failUnlessEqual(rc_out_err[0], 0))
 
         test_dir_path = os.path.join(self.basedir, "test")
         fileutil.make_dirs(test_dir_path)
@@ -2422,7 +2491,8 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check_error_message)
         d.addCallback(lambda ignored:
             self.do_cli("ls", "--json", "tahoe:test"))
-        def _got_testdir_json((rc, out, err)):
+        def _got_testdir_json(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessEqual(rc, 0)
 
             filetype, data = simplejson.loads(out)
@@ -2517,7 +2587,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         d = self.do_cli("create-alias", "tahoe")
 
         d.addCallback(lambda res: do_backup())
-        def _check0((rc, out, err)):
+        def _check0(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             fu, fr, fs, dc, dr, ds = self.count_output(out)
@@ -2532,7 +2603,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         d.addCallback(_check0)
 
         d.addCallback(lambda res: self.do_cli("ls", "--uri", "tahoe:backups"))
-        def _check1((rc, out, err)):
+        def _check1(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.split("\n")
@@ -2543,25 +2615,29 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             self.failUnlessReallyEqual(sorted(childnames), ["Archives", "Latest"])
         d.addCallback(_check1)
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest"))
-        def _check2((rc, out, err)):
+        def _check2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(sorted(out.split()), ["empty", "parent"])
         d.addCallback(_check2)
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Latest/empty"))
-        def _check2a((rc, out, err)):
+        def _check2a(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(out.strip(), "")
         d.addCallback(_check2a)
         d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
-        def _check3((rc, out, err)):
+        def _check3(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(out, "foo")
         d.addCallback(_check3)
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
-        def _check4((rc, out, err)):
+        def _check4(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.old_archives = out.split()
@@ -2571,9 +2647,10 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
 
         d.addCallback(self.stall, 1.1)
         d.addCallback(lambda res: do_backup())
-        def _check4a((rc, out, err)):
+        def _check4a(rc_out_err):
             # second backup should reuse everything, if the backupdb is
             # available
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             fu, fr, fs, dc, dr, ds = self.count_output(out)
@@ -2602,10 +2679,11 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
 
         d.addCallback(self.stall, 1.1)
         d.addCallback(lambda res: do_backup(verbose=True))
-        def _check4b((rc, out, err)):
+        def _check4b(rc_out_err):
             # we should check all files, and re-use all of them. None of
             # the directories should have been changed, so we should
             # re-use all of them too.
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             fu, fr, fs, dc, dr, ds = self.count_output(out)
@@ -2621,7 +2699,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         d.addCallback(_check4b)
 
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
-        def _check5((rc, out, err)):
+        def _check5(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.new_archives = out.split()
@@ -2645,9 +2724,10 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             self.writeto("empty", "imagine nothing being here")
             return do_backup()
         d.addCallback(_modify)
-        def _check5a((rc, out, err)):
+        def _check5a(rc_out_err):
             # second backup should reuse bar.txt (if backupdb is available),
             # and upload the rest. None of the directories can be reused.
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             fu, fr, fs, dc, dr, ds = self.count_output(out)
@@ -2662,7 +2742,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
             self.failUnlessReallyEqual(ds, 0)
         d.addCallback(_check5a)
         d.addCallback(lambda res: self.do_cli("ls", "tahoe:backups/Archives"))
-        def _check6((rc, out, err)):
+        def _check6(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.new_archives = out.split()
@@ -2671,14 +2752,16 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
                                  self.old_archives[0])
         d.addCallback(_check6)
         d.addCallback(lambda res: self.do_cli("get", "tahoe:backups/Latest/parent/subdir/foo.txt"))
-        def _check7((rc, out, err)):
+        def _check7(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(out, "FOOF!")
             # the old snapshot should not be modified
             return self.do_cli("get", "tahoe:backups/Archives/%s/parent/subdir/foo.txt" % self.old_archives[0])
         d.addCallback(_check7)
-        def _check8((rc, out, err)):
+        def _check8(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(out, "foo")
@@ -2812,7 +2895,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda res: self.do_cli("backup", "--verbose", source, "tahoe:test"))
 
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 2)
             foo2 = os.path.join(source, "foo2.txt")
             self.failUnlessReallyEqual(err, "WARNING: cannot backup symlink '%s'\n" % foo2)
@@ -2842,7 +2926,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test"))
 
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 2)
             self.failUnlessReallyEqual(err, "WARNING: permission denied on file %s\n" % os.path.join(source, "foo.txt"))
 
@@ -2859,7 +2944,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
 
         # This is necessary for the temp files to be correctly removed
         def _cleanup(self):
-            os.chmod(os.path.join(source, "foo.txt"), 0644)
+            os.chmod(os.path.join(source, "foo.txt"), 0o644)
         d.addCallback(_cleanup)
         d.addErrback(_cleanup)
 
@@ -2877,7 +2962,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda res: self.do_cli("backup", source, "tahoe:test"))
 
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 2)
             self.failUnlessReallyEqual(err, "WARNING: permission denied on directory %s\n" % os.path.join(source, "test"))
 
@@ -2894,7 +2980,7 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
 
         # This is necessary for the temp files to be correctly removed
         def _cleanup(self):
-            os.chmod(os.path.join(source, "test"), 0655)
+            os.chmod(os.path.join(source, "test"), 0o655)
         d.addCallback(_cleanup)
         d.addErrback(_cleanup)
         return d
@@ -2906,7 +2992,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         self.set_up_grid()
         source = os.path.join(self.basedir, "file1")
         d = self.do_cli('backup', source, source)
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -2920,7 +3007,8 @@ class Backup(GridTestMixin, CLITestMixin, StallMixin, unittest.TestCase):
         self.set_up_grid()
         source = os.path.join(self.basedir, "file1")
         d = self.do_cli("backup", source, "nonexistent:" + source)
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
@@ -2943,7 +3031,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_uri)
 
         d.addCallback(lambda ign: self.do_cli("check", self.uri))
-        def _check1((rc, out, err)):
+        def _check1(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -2952,7 +3041,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check1)
 
         d.addCallback(lambda ign: self.do_cli("check", "--raw", self.uri))
-        def _check2((rc, out, err)):
+        def _check2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             data = simplejson.loads(out)
@@ -2966,7 +3056,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_lit_uri)
 
         d.addCallback(lambda ign: self.do_cli("check", self.lit_uri))
-        def _check_lit((rc, out, err)):
+        def _check_lit(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -2974,7 +3065,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check_lit)
 
         d.addCallback(lambda ign: self.do_cli("check", "--raw", self.lit_uri))
-        def _check_lit_raw((rc, out, err)):
+        def _check_lit_raw(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             data = simplejson.loads(out)
@@ -3009,7 +3101,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_clobber_shares)
 
         d.addCallback(lambda ign: self.do_cli("check", "--verify", self.uri))
-        def _check3((rc, out, err)):
+        def _check3(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3022,7 +3115,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check3)
 
         d.addCallback(lambda ign: self.do_cli("check", "--verify", "--raw", self.uri))
-        def _check3_raw((rc, out, err)):
+        def _check3_raw(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             data = simplejson.loads(out)
@@ -3035,7 +3129,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda ign:
                       self.do_cli("check", "--verify", "--repair", self.uri))
-        def _check4((rc, out, err)):
+        def _check4(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3048,7 +3143,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda ign:
                       self.do_cli("check", "--verify", "--repair", self.uri))
-        def _check5((rc, out, err)):
+        def _check5(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3089,7 +3185,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_uri, "mutable")
 
         d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
-        def _check1((rc, out, err)):
+        def _check1(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3104,7 +3201,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda ign: self.do_cli("deep-check", "--verbose",
                                               self.rooturi))
-        def _check2((rc, out, err)):
+        def _check2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3117,7 +3215,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_check2)
 
         d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
-        def _check_stats((rc, out, err)):
+        def _check_stats(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3155,7 +3254,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d.addCallback(lambda ign:
                       self.do_cli("deep-check", "--verbose", self.rooturi))
-        def _check3((rc, out, err)):
+        def _check3(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3171,7 +3271,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign:
                       self.do_cli("deep-check", "--verbose", "--verify",
                                   self.rooturi))
-        def _check4((rc, out, err)):
+        def _check4(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3189,7 +3290,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(lambda ign:
                       self.do_cli("deep-check", "--raw",
                                   self.rooturi))
-        def _check5((rc, out, err)):
+        def _check5(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3202,7 +3304,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       self.do_cli("deep-check",
                                   "--verbose", "--verify", "--repair",
                                   self.rooturi))
-        def _check6((rc, out, err)):
+        def _check6(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3227,7 +3330,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
                       fn.add_file(u"subfile", upload.Data(DATA+"2", "")))
         d.addCallback(lambda ign:
                       self.delete_shares_numbered(self.uris["subdir"],
-                                                  range(10)))
+                                                  list(range(10))))
 
         # root
         # rootg\u00F6\u00F6d/
@@ -3237,7 +3340,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         # root/subfile
 
         d.addCallback(lambda ign: self.do_cli("manifest", self.rooturi))
-        def _manifest_failed((rc, out, err)):
+        def _manifest_failed(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failIfEqual(rc, 0)
             self.failUnlessIn("ERROR: UnrecoverableFileError", err)
             # the fatal directory should still show up, as the last line
@@ -3245,7 +3349,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_manifest_failed)
 
         d.addCallback(lambda ign: self.do_cli("deep-check", self.rooturi))
-        def _deep_check_failed((rc, out, err)):
+        def _deep_check_failed(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failIfEqual(rc, 0)
             self.failUnlessIn("ERROR: UnrecoverableFileError", err)
             # we want to make sure that the error indication is the last
@@ -3275,7 +3380,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Check/check_without_alias"
         self.set_up_grid()
         d = self.do_cli("check")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3290,7 +3396,8 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Check/check_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("check", "nonexistent:")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
@@ -3309,24 +3416,26 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
         d.addCallback(_stash_uri)
         d = c0.create_dirnode()
         d.addCallback(_stash_uri)
-        
+
         d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], self.uriList[1]))
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             #Ensure healthy appears for each uri
-            self.failUnlessIn("Healthy", out[:len(out)/2])
-            self.failUnlessIn("Healthy", out[len(out)/2:])
+            self.failUnlessIn("Healthy", out[:len(out)//2])
+            self.failUnlessIn("Healthy", out[len(out)//2:])
         d.addCallback(_check)
-        
+
         d.addCallback(lambda ign: self.do_cli("check", self.uriList[0], "nonexistent:"))
-        def _check2((rc, out, err)):
+        def _check2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("Healthy", out)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
         d.addCallback(_check2)
-        
+
         return d
 
 
@@ -3340,7 +3449,7 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
         d = c0.upload(upload.Data(DATA, convergence=""))
         def _stash_bad(ur):
             self.uri_1share = ur.get_uri()
-            self.delete_shares_numbered(ur.get_uri(), range(1,10))
+            self.delete_shares_numbered(ur.get_uri(), list(range(1,10)))
         d.addCallback(_stash_bad)
 
         # the download is abandoned as soon as it's clear that we won't get
@@ -3350,7 +3459,8 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
         in_pending_msg = "ran out of shares: complete= pending=Share(sh0-on-fob7vqgd) overdue= unused= need 3"
 
         d.addCallback(lambda ign: self.do_cli("get", self.uri_1share))
-        def _check1((rc, out, err)):
+        def _check1(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failIfEqual(rc, 0)
             self.failUnless("410 Gone" in err, err)
             self.failUnlessIn("NotEnoughSharesError: ", err)
@@ -3360,7 +3470,8 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         targetf = os.path.join(self.basedir, "output")
         d.addCallback(lambda ign: self.do_cli("get", self.uri_1share, targetf))
-        def _check2((rc, out, err)):
+        def _check2(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failIfEqual(rc, 0)
             self.failUnless("410 Gone" in err, err)
             self.failUnlessIn("NotEnoughSharesError: ", err)
@@ -3384,7 +3495,8 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
                    "endheaders", _socket_error)
 
         d = self.do_cli("mkdir")
-        def _check_invalid((rc,stdout,stderr)):
+        def _check_invalid(rc_stdout_stderr):
+            (rc,stdout,stderr) = rc_stdout_stderr
             self.failIfEqual(rc, 0)
             self.failUnlessIn("Error trying to connect to http://127.0.0.1", stderr)
         d.addCallback(_check_invalid)
@@ -3399,7 +3511,8 @@ class Get(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Get/get_without_alias"
         self.set_up_grid()
         d = self.do_cli('get', 'file')
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3412,7 +3525,8 @@ class Get(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Get/get_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("get", "nonexistent:file")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
@@ -3429,7 +3543,8 @@ class Manifest(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Manifest/manifest_without_alias"
         self.set_up_grid()
         d = self.do_cli("manifest")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3442,7 +3557,8 @@ class Manifest(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Manifest/manifest_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("manifest", "nonexistent:")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
@@ -3458,7 +3574,8 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda res: self.do_cli("mkdir", "test"))
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessIn("URI:", out)
@@ -3470,7 +3587,8 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = os.path.dirname(self.mktemp())
         self.set_up_grid()
         d = self.do_cli("create-alias", "tahoe")
-        def _check((rc, out, err), st):
+        def _check(rc_out_err, st):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessIn(st, out)
@@ -3499,7 +3617,8 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = os.path.dirname(self.mktemp())
         self.set_up_grid()
         d = self.do_cli("mkdir", "--format=SDMF")
-        def _check((rc, out, err), st):
+        def _check(rc_out_err, st):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessIn(st, out)
@@ -3542,7 +3661,8 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda res: self.do_cli("mkdir", motorhead_arg))
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnlessReallyEqual(err, "")
             self.failUnlessIn("URI:", out)
@@ -3556,7 +3676,8 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Mkdir/mkdir_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("mkdir", "havasu:")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3579,7 +3700,8 @@ class Unlink(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Unlink/%s_without_alias" % (self.command,)
         self.set_up_grid()
         d = self.do_cli(self.command, "afile")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3595,7 +3717,8 @@ class Unlink(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Unlink/%s_with_nonexistent_alias" % (self.command,)
         self.set_up_grid()
         d = self.do_cli(self.command, "nonexistent:afile")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessIn("nonexistent", err)
@@ -3613,13 +3736,15 @@ class Unlink(GridTestMixin, CLITestMixin, unittest.TestCase):
         self._create_test_file()
         d = self.do_cli("create-alias", "tahoe")
         d.addCallback(lambda ign: self.do_cli("put", self.datafile, "tahoe:test"))
-        def _do_unlink((rc, out, err)):
+        def _do_unlink(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 0)
             self.failUnless(out.startswith("URI:"), out)
             return self.do_cli(self.command, out.strip('\n'))
         d.addCallback(_do_unlink)
 
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("'tahoe %s'" % (self.command,), err)
             self.failUnlessIn("path must be given", err)
@@ -3647,7 +3772,8 @@ class Stats(GridTestMixin, CLITestMixin, unittest.TestCase):
 
         # make sure we can get stats on an empty directory too
         d.addCallback(lambda ign: self.do_cli("stats", self.rooturi))
-        def _check_stats((rc, out, err)):
+        def _check_stats(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(err, "")
             self.failUnlessReallyEqual(rc, 0)
             lines = out.splitlines()
@@ -3668,7 +3794,8 @@ class Stats(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Stats/stats_without_alias"
         self.set_up_grid()
         d = self.do_cli("stats")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3681,7 +3808,8 @@ class Stats(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Stats/stats_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("stats", "havasu:")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3697,7 +3825,8 @@ class Webopen(GridTestMixin, CLITestMixin, unittest.TestCase):
         self.basedir = "cli/Webopen/webopen_with_nonexistent_alias"
         self.set_up_grid()
         d = self.do_cli("webopen", "fake:")
-        def _check((rc, out, err)):
+        def _check(rc_out_err):
+            (rc, out, err) = rc_out_err
             self.failUnlessReallyEqual(rc, 1)
             self.failUnlessIn("error:", err)
             self.failUnlessReallyEqual(out, "")
@@ -3721,14 +3850,16 @@ class Webopen(GridTestMixin, CLITestMixin, unittest.TestCase):
             self.basedir = "cli/Webopen/webopen"
             self.set_up_grid()
             d = self.do_cli("create-alias", "alias:")
-            def _check_alias((rc, out, err)):
+            def _check_alias(rc_out_err):
+                (rc, out, err) = rc_out_err
                 self.failUnlessReallyEqual(rc, 0, repr((rc, out, err)))
                 self.failUnlessIn("Alias 'alias' created", out)
                 self.failUnlessReallyEqual(err, "")
                 self.alias_uri = get_aliases(self.get_clientdir())["alias"]
             d.addCallback(_check_alias)
             d.addCallback(lambda res: self.do_cli("webopen", "alias:"))
-            def _check_webopen((rc, out, err)):
+            def _check_webopen(rc_out_err):
+                (rc, out, err) = rc_out_err
                 self.failUnlessReallyEqual(rc, 0, repr((rc, out, err)))
                 self.failUnlessReallyEqual(out, "")
                 self.failUnlessReallyEqual(err, "")
index e3760d61ff1dac88b226b717d11be9c892759958..e2899482e591b887de104011f9c3909b1ef2fead 100644 (file)
@@ -15,20 +15,23 @@ class T(unittest.TestCase):
         assert params == (size, required_shares, max_shares)
         log.msg("params: %s" % (params,))
         d = enc.encode(data0s)
-        def _done_encoding_all((shares, shareids)):
+        def _done_encoding_all(shares_and_shareids):
+            (shares, shareids) = shares_and_shareids
             self.failUnlessEqual(len(shares), max_shares)
             self.shares = shares
             self.shareids = shareids
         d.addCallback(_done_encoding_all)
         if fewer_shares is not None:
             # also validate that the desired_shareids= parameter works
-            desired_shareids = random.sample(range(max_shares), fewer_shares)
+            desired_shareids = random.sample(list(range(max_shares)), fewer_shares)
             d.addCallback(lambda res: enc.encode(data0s, desired_shareids))
-            def _check_fewer_shares((some_shares, their_shareids)):
+            def _check_fewer_shares(shares_and_shareids):
+                (some_shares, their_shareids) = shares_and_shareids
                 self.failUnlessEqual(tuple(their_shareids), tuple(desired_shareids))
             d.addCallback(_check_fewer_shares)
 
-        def _decode((shares, shareids)):
+        def _decode(shares_and_shareids):
+            (shares, shareids) = shares_and_shareids
             dec = CRSDecoder()
             dec.set_params(*params)
             d1 = dec.decode(shares, shareids)
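
The list(range(...)) wrappers above come from 2to3's conservative handling of
range(): on Python 3 range() returns a lazy sequence object rather than a
list. The wrapper is only strictly required where real list behaviour is
needed; random.sample() in particular accepts any sequence, including a
Python 3 range. A short illustration (values are illustrative):

    import random

    max_shares = 10

    # Fine on both Python 2 and 3: sample() takes any sequence.
    desired_shareids = random.sample(range(max_shares), 3)

    # list() is needed where a real list is required, e.g. for mutation:
    shareids = list(range(max_shares))
    shareids.append(max_shares)  # a bare Python 3 range has no append()
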
index c4aa9914247d9408932e04702836baf7e63bfd96..48d1ba26e5359d3334d269be23b1bc2544fc70e8 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import time
 import os.path
@@ -293,7 +294,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
                 left = p["remaining-sleep-time"]
                 self.failUnless(isinstance(left, float), left)
                 self.failUnless(left > 0.0, left)
-            except Exception, e:
+            except Exception as e:
                 did_check_progress[0] = e
             else:
                 did_check_progress[0] = True
@@ -373,9 +374,9 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
             # our buildslaves vary too much in their speeds and load levels,
             # and many of them only manage to hit 7% usage when our target is
             # 50%. So don't assert anything about the results, just log them.
-            print
-            print "crawler: got %d%% percent when trying for 50%%" % percent
-            print "crawler: got %d full cycles" % c.cycles
+            print()
+            print("crawler: got %d%% percent when trying for 50%%" % percent)
+            print("crawler: got %d full cycles" % c.cycles)
         d.addCallback(_done)
         return d
 
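
This file collects the two purely syntactic changes that recur throughout the
patch: "except ExcClass, e" became "except ExcClass as e" (accepted since
Python 2.6, mandatory on Python 3), and the print statement became the
print() function, enabled under Python 2 by the __future__ import added at
the top of the file. A self-contained sketch:

    from __future__ import print_function

    try:
        1 / 0
    except ZeroDivisionError as e:  # "except ZeroDivisionError, e" fails on Python 3
        print("caught: %s" % e)

    print()  # a bare "print" statement becomes an empty print() call
    print("crawler: got %d full cycles" % 3)
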
index 98f2a528850d5212d8a89f326260c799c99dcb55..ac0111341fc7b2006edc9fa258ec7e3a173022ce 100644 (file)
@@ -138,7 +138,8 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin):
         d.addCallback(self.decode_json)
         return d
 
-    def decode_json(self, (s,url)):
+    def decode_json(self, s_and_url):
+        (s,url) = s_and_url
         try:
             data = simplejson.loads(s)
         except ValueError:
@@ -152,7 +153,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin):
                 continue
             try:
                 yield simplejson.loads(unit)
-            except ValueError, le:
+            except ValueError as le:
                 le.args = tuple(le.args + (unit,))
                 raise
 
@@ -372,8 +373,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
 
     def do_web_stream_manifest(self, ignored):
         d = self.web(self.root, method="POST", t="stream-manifest")
-        d.addCallback(lambda (output,url):
-                      self._check_streamed_manifest(output))
+        d.addCallback(lambda output_url:
+                      self._check_streamed_manifest(output_url[0]))
         return d
 
     def _check_streamed_manifest(self, output):
@@ -762,7 +763,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         d = self._run_cli(["--node-directory", basedir,
                            "manifest",
                            self.root_uri])
-        def _check((out,err)):
+        def _check(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             lines = [l for l in out.split("\n") if l]
             self.failUnlessEqual(len(lines), 8)
@@ -791,7 +793,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
                            "manifest",
                            "--raw",
                            self.root_uri])
-        def _check((out,err)):
+        def _check(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             # this should be the same as the POST t=stream-manifest output
             self._check_streamed_manifest(out)
@@ -804,7 +807,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
                            "manifest",
                            "--storage-index",
                            self.root_uri])
-        def _check((out,err)):
+        def _check(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             self._check_manifest_storage_index(out)
         d.addCallback(_check)
@@ -816,7 +820,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
                            "manifest",
                            "--verify-cap",
                            self.root_uri])
-        def _check((out,err)):
+        def _check(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             lines = [l for l in out.split("\n") if l]
             self.failUnlessEqual(len(lines), 3)
@@ -832,7 +837,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
                            "manifest",
                            "--repair-cap",
                            self.root_uri])
-        def _check((out,err)):
+        def _check(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             lines = [l for l in out.split("\n") if l]
             self.failUnlessEqual(len(lines), 3)
@@ -847,7 +853,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         d = self._run_cli(["--node-directory", basedir,
                            "stats",
                            self.root_uri])
-        def _check3((out,err)):
+        def _check3(out_and_err):
+            (out,err) = out_and_err
             lines = [l.strip() for l in out.split("\n") if l]
             self.failUnless("count-immutable-files: 1" in lines)
             self.failUnless("count-mutable-files: 1" in lines)
@@ -868,7 +875,8 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
                            "stats",
                            "--raw",
                            self.root_uri])
-        def _check4((out,err)):
+        def _check4(out_and_err):
+            (out,err) = out_and_err
             data = simplejson.loads(out)
             self.failUnlessEqual(data["count-immutable-files"], 1)
             self.failUnlessEqual(data["count-immutable-files"], 1)
@@ -1000,7 +1008,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
                 self._run_cli(["debug", "corrupt-share", sharefile])
 
     def _delete_most_shares(self, node):
-        self.delete_shares_numbered(node.get_uri(), range(1,10))
+        self.delete_shares_numbered(node.get_uri(), list(range(1,10)))
 
 
     def check_is_healthy(self, cr, where):
@@ -1011,7 +1019,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
             self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
             self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
             return cr
-        except Exception, le:
+        except Exception as le:
             le.args = tuple(le.args + (where,))
             raise
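
The "except Exception, le" form is Python-2-only syntax; "except Exception as le" has been accepted since Python 2.6 and is the only spelling Python 3 allows. A self-contained illustration of the pattern used here, where extra context is appended to the exception's args before re-raising:

    try:
        raise ValueError("boom")
    except Exception as e:    # "except Exception, e" is a SyntaxError on Python 3
        e.args = tuple(e.args + ("where: check_is_healthy",))
        print(e.args)         # ('boom', 'where: check_is_healthy')
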
 
@@ -1247,7 +1255,8 @@ class Large(DeepCheckBase, unittest.TestCase):
         def _start_deepcheck(ignored):
             return self.web(self.root, method="POST", t="stream-deep-check")
         d.addCallback(_start_deepcheck)
-        def _check( (output, url) ):
+        def _check(xxx_todo_changeme7):
+            (output, url) = xxx_todo_changeme7
             units = list(self.parse_streamed_json(output))
             self.failUnlessEqual(len(units), 2+COUNT+1)
         d.addCallback(_check)
index c65114fb1bcc13917d1adc499e68fb8b4ec49833..666aba84dc029971d5776d9c5dfb5753e09982c8 100644 (file)
@@ -24,6 +24,7 @@ from allmydata.unknown import UnknownNode, strip_prefix_for_ro
 from allmydata.nodemaker import NodeMaker
 from base64 import b32decode
 import allmydata.test.common_util as testutil
+import six
 
 class MemAccum:
     implements(IConsumer)
@@ -173,7 +174,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
                             "largest-directory-children": 2,
                             "largest-immutable-file": 0,
                             }
-                for k,v in expected.iteritems():
+                for k,v in six.iteritems(expected):
                     self.failUnlessReallyEqual(stats[k], v,
                                                "stats[%s] was %s, not %s" %
                                                (k, stats[k], v))
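
dict.iteritems() no longer exists on Python 3, where items() itself returns a lazy view. six.iteritems(d) selects the non-copying variant on either interpreter, which is why the new "import six" appears at the top of this module. A small sketch with made-up stats:

    import six

    expected = {"count-files": 4, "count-directories": 3}
    for k, v in six.iteritems(expected):   # d.iteritems() on Py2, iter(d.items()) on Py3
        print("%s: %d" % (k, v))
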
@@ -263,8 +264,8 @@ class Dirnode(GridTestMixin, unittest.TestCase,
                                                 { 'tahoe': {'linkcrtime': "bogus"}}))
             d.addCallback(lambda res: n.get_metadata_for(u"c2"))
             def _has_good_linkcrtime(metadata):
-                self.failUnless(metadata.has_key('tahoe'))
-                self.failUnless(metadata['tahoe'].has_key('linkcrtime'))
+                self.failUnless('tahoe' in metadata)
+                self.failUnless('linkcrtime' in metadata['tahoe'])
                 self.failIfEqual(metadata['tahoe']['linkcrtime'], 'bogus')
             d.addCallback(_has_good_linkcrtime)
 
@@ -1406,9 +1407,9 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase):
 
     def _check_children(self, children):
         # Are all the expected child nodes there?
-        self.failUnless(children.has_key(u'file1'))
-        self.failUnless(children.has_key(u'file2'))
-        self.failUnless(children.has_key(u'file3'))
+        self.failUnless(u'file1' in children)
+        self.failUnless(u'file2' in children)
+        self.failUnless(u'file3' in children)
 
         # Are the metadata for child 3 right?
         file3_rocap = "URI:CHK:cmtcxq7hwxvfxan34yiev6ivhy:qvcekmjtoetdcw4kmi7b3rtblvgx7544crnwaqtiewemdliqsokq:3:10:5"
@@ -1790,7 +1791,7 @@ class DeepStats(testutil.ReallyEqualMixin, unittest.TestCase):
                                      (101, 316, 216),
                                      (317, 1000, 684),
                                      (1001, 3162, 99),
-                                     (3162277660169L, 10000000000000L, 1),
+                                     (3162277660169, 10000000000000, 1),
                                      ])
 
 class UCWEingMutableFileNode(MutableFileNode):
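
Python 3 merged int and long, so the trailing L suffix is a syntax error there. On Python 2 a bare literal of this size is automatically promoted to long anyway, so dropping the suffix changes nothing. For instance:

    big = 3162277660169      # formerly 3162277660169L; unbounded precision on both versions
    print(big * 10**6)
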
index 710d98ed1432c98777552eaed33005637042b25f..7e2a5791cc64044f785450e89d79fe4c80978079 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 # system-level upload+download roundtrip test, but using shares created from
 # a previous run. This asserts that the current code is capable of decoding
@@ -328,7 +329,7 @@ class DownloadTest(_Base, unittest.TestCase):
         n = self.c0.create_node_from_uri(immutable_uri)
 
         c = MemoryConsumer()
-        d = n.read(c, 0L, 10L)
+        d = n.read(c, 0, 10)
         d.addCallback(lambda c: len("".join(c.chunks)))
         d.addCallback(lambda size: self.failUnlessEqual(size, 10))
         return d
@@ -521,8 +522,8 @@ class DownloadTest(_Base, unittest.TestCase):
             n._cnode._node._build_guessed_tables(u.max_segment_size)
             con1 = MemoryConsumer()
             con2 = MemoryConsumer()
-            d = n.read(con1, 0L, 20)
-            d2 = n.read(con2, 140L, 20)
+            d = n.read(con1, 0, 20)
+            d2 = n.read(con2, 140, 20)
             # con2 will be cancelled, so d2 should fail with DownloadStopped
             def _con2_should_not_succeed(res):
                 self.fail("the second read should not have succeeded")
@@ -562,8 +563,8 @@ class DownloadTest(_Base, unittest.TestCase):
             n._cnode._node._build_guessed_tables(u.max_segment_size)
             con1 = MemoryConsumer()
             con2 = MemoryConsumer()
-            d = n.read(con1, 0L, 20)
-            d2 = n.read(con2, 140L, 20)
+            d = n.read(con1, 0, 20)
+            d2 = n.read(con2, 140, 20)
             # con2 should wait for con1 to fail and then con2 should succeed.
             # In particular, we should not lose progress. If this test fails,
             # it will fail with a timeout error.
@@ -617,7 +618,8 @@ class DownloadTest(_Base, unittest.TestCase):
         n = self.c0.create_node_from_uri(immutable_uri)
         cn = n._cnode
         (d,c) = cn.get_segment(0)
-        def _got_segment((offset,data,decodetime)):
+        def _got_segment(xxx_todo_changeme):
+            (offset,data,decodetime) = xxx_todo_changeme
             self.failUnlessEqual(offset, 0)
             self.failUnlessEqual(len(data), len(plaintext))
         d.addCallback(_got_segment)
@@ -1066,9 +1068,9 @@ class Corruption(_Base, unittest.TestCase):
             return d
         d.addCallback(_uploaded)
         def _show_results(ign):
-            print
-            print ("of [0:%d], corruption ignored in %s" %
-                   (len(self.sh0_orig), undetected.dump()))
+            print()
+            print(("of [0:%d], corruption ignored in %s" %
+                   (len(self.sh0_orig), undetected.dump())))
         if self.catalog_detection:
             d.addCallback(_show_results)
             # of [0:2070], corruption ignored in len=1133:
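
The "from __future__ import print_function" line added at the top of the file makes print a function on Python 2 as well, so the converted calls behave identically under both interpreters; without the import, a Python 2 "print(a, b)" would print a tuple, which is why the import must accompany the call-syntax rewrite. A minimal sketch:

    from __future__ import print_function

    print()                          # a blank line, not the empty tuple "()"
    print("of [0:%d]" % 2070)        # single-argument call, same output on Py2 and Py3
    print("got", 3, "expected", 4)   # multiple arguments, joined by spaces
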
index c908e97490db81b71e2949e4918ea0d5afcd84bc..ea70e55d1700c958468e0d66aa6b568875d063ab 100644 (file)
@@ -10,6 +10,7 @@ from allmydata.util.assertutil import _assert
 from allmydata.util.consumer import download_to_data
 from allmydata.interfaces import IStorageBucketWriter, IStorageBucketReader
 from allmydata.test.no_network import GridTestMixin
+import six
 
 class LostPeerError(Exception):
     pass
@@ -174,7 +175,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
     if _TMP % K != 0:
         _TMP += (K - (_TMP % K))
     TAIL_SEGSIZE = _TMP
-    _TMP = SIZE / SEGSIZE
+    _TMP = SIZE // SEGSIZE
     if SIZE % SEGSIZE != 0:
         _TMP += 1
     NUM_SEGMENTS = _TMP
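
On Python 3 the / operator performs true division even between ints, so segment-count arithmetic like this must use //, which floors on both versions. E.g.:

    SIZE, SEGSIZE = 87, 25
    num_segments = SIZE // SEGSIZE     # 3; SIZE / SEGSIZE would be 3.48 on Python 3
    if SIZE % SEGSIZE != 0:
        num_segments += 1
    print(num_segments)                # 4
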
@@ -233,7 +234,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
 
     def test_reject_insufficient(self):
         dl = []
-        for k in self.mindict.iterkeys():
+        for k in six.iterkeys(self.mindict):
             insuffdict = self.mindict.copy()
             del insuffdict[k]
             d = self._test_reject(insuffdict)
@@ -242,7 +243,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
 
     def test_accept_optional(self):
         dl = []
-        for k in self.optional_consistent.iterkeys():
+        for k in six.iterkeys(self.optional_consistent):
             mydict = self.mindict.copy()
             mydict[k] = self.optional_consistent[k]
             d = self._test_accept(mydict)
@@ -251,7 +252,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
 
     def test_reject_optional(self):
         dl = []
-        for k in self.optional_inconsistent.iterkeys():
+        for k in six.iterkeys(self.optional_inconsistent):
             for v in self.optional_inconsistent[k]:
                 mydict = self.mindict.copy()
                 mydict[k] = v
index abd3d8cb58e7b6f09653248175106c332055c5cc..5d4d1ebd4e3ac2a8df8b8620b7d5ce76ec6cfef6 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 lumiere_nfc = u"lumi\u00E8re"
 Artonwall_nfc = u"\u00C4rtonwall.mp3"
@@ -19,24 +20,24 @@ if __name__ == "__main__":
     import platform
 
     if len(sys.argv) != 2:
-        print "Usage: %s lumi<e-grave>re" % sys.argv[0]
+        print("Usage: %s lumi<e-grave>re" % sys.argv[0])
         sys.exit(1)
 
     if sys.platform == "win32":
         try:
             from allmydata.windows.fixups import initialize
         except ImportError:
-            print "set PYTHONPATH to the src directory"
+            print("set PYTHONPATH to the src directory")
             sys.exit(1)
         initialize()
 
-    print
-    print "class MyWeirdOS(EncodingUtil, unittest.TestCase):"
-    print "    uname = '%s'" % ' '.join(platform.uname())
-    print "    argv = %s" % repr(sys.argv[1])
-    print "    platform = '%s'" % sys.platform
-    print "    filesystem_encoding = '%s'" % sys.getfilesystemencoding()
-    print "    io_encoding = '%s'" % sys.stdout.encoding
+    print()
+    print("class MyWeirdOS(EncodingUtil, unittest.TestCase):")
+    print("    uname = '%s'" % ' '.join(platform.uname()))
+    print("    argv = %s" % repr(sys.argv[1]))
+    print("    platform = '%s'" % sys.platform)
+    print("    filesystem_encoding = '%s'" % sys.getfilesystemencoding())
+    print("    io_encoding = '%s'" % sys.stdout.encoding)
     try:
         tmpdir = tempfile.mkdtemp()
         for fname in TEST_FILENAMES:
@@ -48,10 +49,10 @@ if __name__ == "__main__":
         else:
             dirlist = os.listdir(tmpdir)
 
-        print "    dirlist = %s" % repr(dirlist)
+        print("    dirlist = %s" % repr(dirlist))
     except:
-        print "    # Oops, I cannot write filenames containing non-ascii characters"
-    print
+        print("    # Oops, I cannot write filenames containing non-ascii characters")
+    print()
 
     shutil.rmtree(tmpdir)
     sys.exit(0)
@@ -260,7 +261,7 @@ class StdlibUnicode(unittest.TestCase):
 
         try:
             os.mkdir(lumiere_nfc)
-        except EnvironmentError, e:
+        except EnvironmentError as e:
             raise unittest.SkipTest("%r\nIt is possible that the filesystem on which this test is being run "
                                     "does not support Unicode, even though the platform does." % (e,))
 
index c98a320966224265a11a924942816a2c80556594..9c28b3099405abab6ccdc69a5954e3b27ee4df48 100644 (file)
@@ -93,12 +93,12 @@ class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase):
 
         expected_root = [
             ('loop',
-             [0, True, 0600, 1, self.FALL_OF_BERLIN_WALL, 'alice', 'alice', '??']),
+             [0, True, 0o600, 1, self.FALL_OF_BERLIN_WALL, 'alice', 'alice', '??']),
             ('immutable',
-             [23, False, 0600, 1, self.TURN_OF_MILLENIUM, 'alice', 'alice', '??']),
+             [23, False, 0o600, 1, self.TURN_OF_MILLENIUM, 'alice', 'alice', '??']),
             ('mutable',
              # timestamp should be 0 if no timestamp metadata is present
-             [0, False, 0600, 1, 0, 'alice', 'alice', '??'])]
+             [0, False, 0o600, 1, 0, 'alice', 'alice', '??'])]
 
         d.addCallback(lambda root: self._compareDirLists(root, expected_root))
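
Bare leading-zero octal literals (0600) are a syntax error on Python 3; the 0o600 form, accepted since Python 2.6, is required and denotes the same bit pattern. A short check:

    import stat

    mode = 0o600                       # was 0600
    assert mode == stat.S_IRUSR | stat.S_IWUSR
    print(oct(mode))
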
 
index abcf4eb1b4e73a19f1af9eb8871981a7cba0a318..d96e8ebd77a5cf6fcdf80c345022e9cf73960f18 100644 (file)
@@ -186,7 +186,7 @@ class Incomplete(unittest.TestCase):
         # this should succeed
         try:
             iht.set_hashes(chain, leaves={0: tagged_hash("tag", "0")})
-        except hashtree.BadHashError, e:
+        except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
 
         self.failUnlessEqual(ht.get_leaf(0), tagged_hash("tag", "0"))
@@ -215,5 +215,5 @@ class Incomplete(unittest.TestCase):
         # this should succeed
         try:
             iht.set_hashes(chain, leaves={4: tagged_hash("tag", "4")})
-        except hashtree.BadHashError, e:
+        except hashtree.BadHashError as e:
             self.fail("bad hash: %s" % e)
index e2d6f6a1dda96ef2f6faa8fd38aca74c4e74ecb3..e6de229487d9f9628e7781708e47a1a81156538d 100644 (file)
@@ -233,7 +233,7 @@ class HungServerDownloadTest(GridTestMixin, ShouldFailMixin, PollMixin,
         done = []
         d = self._set_up(False, "test_5_overdue_immutable")
         def _reduce_max_outstanding_requests_and_download(ign):
-            self._hang_shares(range(5))
+            self._hang_shares(list(range(5)))
             n = self.c0.create_node_from_uri(self.uri)
             n._cnode._maybe_create_download_node()
             self._sf = n._cnode._node._sharefinder
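
range() returns a lazy sequence object on Python 3, not a list, so call sites that mutate the result or need a real list (shuffle, remove, concatenation) gain an explicit list(...); pure iteration can keep bare range(). For example:

    import random

    shnums = list(range(10))           # bare range(10) cannot be shuffled on Python 3
    random.shuffle(shnums)
    print(shnums[:4])
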
index 963cedae03dc97d48bd95fe1796d0d011dd0c3aa..5f3b0c910d6ad7d1eadad4dff74fe7ff0b6d3c8e 100644 (file)
@@ -165,7 +165,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
         return d
 
     def _shuffled(self, num_shnums):
-        shnums = range(10)
+        shnums = list(range(10))
         random.shuffle(shnums)
         return shnums[:num_shnums]
 
@@ -196,7 +196,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
         # test the Tahoe code...
         def _then_delete_8(ign):
             self.restore_all_shares(self.shares)
-            self.delete_shares_numbered(self.uri, range(8))
+            self.delete_shares_numbered(self.uri, list(range(8)))
         d.addCallback(_then_delete_8)
         d.addCallback(lambda ign:
                       self.shouldFail(NotEnoughSharesError, "download-2",
@@ -223,7 +223,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
         removed."""
         d = self.startup("immutable/Test/download_from_only_3_remaining_shares")
         d.addCallback(lambda ign:
-                      self.delete_shares_numbered(self.uri, range(7)))
+                      self.delete_shares_numbered(self.uri, list(range(7))))
         d.addCallback(self._download_and_check_plaintext)
         def _after_download(num_reads):
             #print num_reads
@@ -247,7 +247,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
         enough shares out there."""
         d = self.startup("download_abort_if_too_many_missing_shares")
         d.addCallback(lambda ign:
-                      self.delete_shares_numbered(self.uri, range(8)))
+                      self.delete_shares_numbered(self.uri, list(range(8))))
         d.addCallback(lambda ign:
                       self.shouldFail(NotEnoughSharesError, "delete 8",
                                       "Last failure: None",
index 21adb814037bab21cd84f670ede6508fb19f1858..9831efdea0d216340089954452f498f4c3f9e570 100644 (file)
@@ -23,6 +23,7 @@ from allmydata.web import introweb
 from allmydata.client import Client as TahoeClient
 from allmydata.util import pollmixin, keyutil, idlib, fileutil
 import allmydata.test.common_util as testutil
+import six
 
 class LoggingMultiService(service.MultiService):
     def log(self, msg, **kw):
@@ -152,7 +153,7 @@ def fakeseq():
 
 seqnum_counter = itertools.count(1)
 def realseq():
-    return seqnum_counter.next(), str(os.randint(1,100000))
+    return six.advance_iterator(seqnum_counter), str(os.randint(1,100000))
 
 def make_ann(furl):
     ann = { "anonymous-storage-FURL": furl,
index fb39af72e15a311576ddcbe46686df36ab40e876..f9e5f38feafabb94d4e5760ce18d5cd9b3ec40a7 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os, re, base64
 from cStringIO import StringIO
 from twisted.trial import unittest
@@ -38,6 +39,7 @@ from allmydata.test.common import TEST_RSA_KEY_SIZE
 from allmydata.test.test_download import PausingConsumer, \
      PausingAndStoppingConsumer, StoppingConsumer, \
      ImmediatelyStoppingConsumer
+import six
 
 def eventuaaaaaly(res=None):
     d = fireEventually(res)
@@ -597,7 +599,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
                                                version=MDMF_VERSION)
         def _check_server_write_counts(ignored):
             sb = self.nodemaker.storage_broker
-            for server in sb.servers.itervalues():
+            for server in six.itervalues(sb.servers):
                 self.failUnlessEqual(server.get_rref().queries, 1)
         d.addCallback(_check_server_write_counts)
         return d
@@ -1228,7 +1230,7 @@ class Servermap(unittest.TestCase, PublishMixin):
             # 10 shares
             self.failUnlessEqual(len(sm.update_data), 10)
             # one version
-            for data in sm.update_data.itervalues():
+            for data in six.itervalues(sm.update_data):
                 self.failUnlessEqual(len(data), 1)
         d.addCallback(_check_servermap)
         return d
@@ -1274,11 +1276,11 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
         return output
 
     def dump_servermap(self, servermap):
-        print "SERVERMAP", servermap
-        print "RECOVERABLE", [self.abbrev_verinfo(v)
-                              for v in servermap.recoverable_versions()]
-        print "BEST", self.abbrev_verinfo(servermap.best_recoverable_version())
-        print "available", self.abbrev_verinfo_dict(servermap.shares_available())
+        print("SERVERMAP", servermap)
+        print("RECOVERABLE", [self.abbrev_verinfo(v)
+                              for v in servermap.recoverable_versions()])
+        print("BEST", self.abbrev_verinfo(servermap.best_recoverable_version()))
+        print("available", self.abbrev_verinfo_dict(servermap.shares_available()))
 
     def do_download(self, servermap, version=None):
         if version is None:
@@ -1549,7 +1551,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
         N = self._fn.get_total_shares()
         d = defer.succeed(None)
         d.addCallback(corrupt, self._storage, "pubkey",
-                      shnums_to_corrupt=range(0, N-k))
+                      shnums_to_corrupt=list(range(0, N-k)))
         d.addCallback(lambda res: self.make_servermap())
         def _do_retrieve(servermap):
             self.failUnless(servermap.get_problems())
@@ -1572,7 +1574,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
         else:
             d = defer.succeed(None)
         d.addCallback(lambda ignored:
-            corrupt(None, self._storage, offset, range(5)))
+            corrupt(None, self._storage, offset, list(range(5))))
         d.addCallback(lambda ignored:
             self.make_servermap())
         def _do_retrieve(servermap):
@@ -1742,7 +1744,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         # On 8 of the shares, corrupt the beginning of the share data.
         # The signature check during the servermap update won't catch this.
         d.addCallback(lambda ignored:
-            corrupt(None, self._storage, "share_data", range(8)))
+            corrupt(None, self._storage, "share_data", list(range(8))))
         # On 2 of the shares, corrupt the end of the share data.
         # The signature check during the servermap update won't catch
         # this either, and the retrieval process will have to process
@@ -2593,7 +2595,8 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
         # the choice of server for share[0].
 
         d = nm.key_generator.generate(TEST_RSA_KEY_SIZE)
-        def _got_key( (pubkey, privkey) ):
+        def _got_key(xxx_todo_changeme):
+            (pubkey, privkey) = xxx_todo_changeme
             nm.key_generator = SameKeyGenerator(pubkey, privkey)
             pubkey_s = pubkey.serialize()
             privkey_s = privkey.serialize()
@@ -2623,9 +2626,9 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
             d.addCallback(lambda res: n.download_best_version())
             d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2"))
             def _explain_error(f):
-                print f
+                print(f)
                 if f.check(NotEnoughServersError):
-                    print "first_error:", f.value.first_error
+                    print("first_error:", f.value.first_error)
                 return f
             d.addErrback(_explain_error)
             return d
@@ -2955,7 +2958,7 @@ class FileHandle(unittest.TestCase):
     def test_filehandle_read(self):
         self.basedir = "mutable/FileHandle/test_filehandle_read"
         chunk_size = 10
-        for i in xrange(0, len(self.test_data), chunk_size):
+        for i in range(0, len(self.test_data), chunk_size):
             data = self.uploadable.read(chunk_size)
             data = "".join(data)
             start = i
@@ -3024,7 +3027,7 @@ class DataHandle(unittest.TestCase):
 
     def test_datahandle_read(self):
         chunk_size = 10
-        for i in xrange(0, len(self.test_data), chunk_size):
+        for i in range(0, len(self.test_data), chunk_size):
             data = self.uploadable.read(chunk_size)
             data = "".join(data)
             start = i
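
xrange is gone on Python 3, where plain range is already lazy; for read-only iteration the rename is behavior-preserving (on Python 2 it merely materializes a small list). For example:

    test_data = "x" * 95
    chunk_size = 10
    for i in range(0, len(test_data), chunk_size):   # was xrange(...)
        print("%d: %d bytes" % (i, len(test_data[i:i + chunk_size])))
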
@@ -3375,7 +3378,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
         def _read_data(version):
             c = consumer.MemoryConsumer()
             d2 = defer.succeed(None)
-            for i in xrange(0, len(self.data), 10000):
+            for i in range(0, len(self.data), 10000):
                 d2.addCallback(lambda ignored, i=i: version.read(c, i, 10000))
             d2.addCallback(lambda ignored:
                 self.failUnlessEqual(self.data, "".join(c.chunks)))
@@ -3389,9 +3392,9 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
         d.addCallback(lambda ignored: "".join(c.chunks))
         def _check(results):
             if results != expected:
-                print
-                print "got: %s ... %s" % (results[:20], results[-20:])
-                print "exp: %s ... %s" % (expected[:20], expected[-20:])
+                print()
+                print("got: %s ... %s" % (results[:20], results[-20:]))
+                print("exp: %s ... %s" % (expected[:20], expected[-20:]))
                 self.fail("results[%s] != expected" % name)
             return version # daisy-chained to next call
         d.addCallback(_check)
@@ -3490,9 +3493,9 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
                               node.download_best_version())
                 def _check(results):
                     if results != expected:
-                        print
-                        print "got: %s ... %s" % (results[:20], results[-20:])
-                        print "exp: %s ... %s" % (expected[:20], expected[-20:])
+                        print()
+                        print("got: %s ... %s" % (results[:20], results[-20:]))
+                        print("exp: %s ... %s" % (expected[:20], expected[-20:]))
                         self.fail("results != expected")
                 d.addCallback(_check)
             return d
@@ -3546,35 +3549,35 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 
         SEGSIZE = 128*1024
         if got != expected:
-            print "differences:"
+            print("differences:")
             for segnum in range(len(expected)//SEGSIZE):
                 start = segnum * SEGSIZE
                 end = (segnum+1) * SEGSIZE
                 got_ends = "%s .. %s" % (got[start:start+20], got[end-20:end])
                 exp_ends = "%s .. %s" % (expected[start:start+20], expected[end-20:end])
                 if got_ends != exp_ends:
-                    print "expected[%d]: %s" % (start, exp_ends)
-                    print "got     [%d]: %s" % (start, got_ends)
+                    print("expected[%d]: %s" % (start, exp_ends))
+                    print("got     [%d]: %s" % (start, got_ends))
             if expspans != gotspans:
-                print "expected: %s" % expspans
-                print "got     : %s" % gotspans
+                print("expected: %s" % expspans)
+                print("got     : %s" % gotspans)
             open("EXPECTED","wb").write(expected)
             open("GOT","wb").write(got)
-            print "wrote data to EXPECTED and GOT"
+            print("wrote data to EXPECTED and GOT")
             self.fail("didn't get expected data")
 
 
     def test_replace_locations(self):
         # exercise fencepost conditions
         SEGSIZE = 128*1024
-        suspects = range(SEGSIZE-3, SEGSIZE+1)+range(2*SEGSIZE-3, 2*SEGSIZE+1)
+        suspects = list(range(SEGSIZE-3, SEGSIZE+1))+list(range(2*SEGSIZE-3, 2*SEGSIZE+1))
         letters = iter("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
         d0 = self.do_upload_mdmf()
         def _run(ign):
             expected = self.data
             d = defer.succeed(None)
             for offset in suspects:
-                new_data = letters.next()*2 # "AA", then "BB", etc
+                new_data = six.advance_iterator(letters)*2 # "AA", then "BB", etc
                 expected = expected[:offset]+new_data+expected[offset+2:]
                 d.addCallback(lambda ign:
                               self.mdmf_node.get_best_mutable_version())
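
Concatenating ranges with + requires both operands to be lists, hence the list(...) wrapper on each side; itertools.chain would avoid the copies but the explicit lists stay closest to the old code. A sketch with this test's segment-boundary values:

    SEGSIZE = 128 * 1024
    suspects = (list(range(SEGSIZE - 3, SEGSIZE + 1)) +
                list(range(2 * SEGSIZE - 3, 2 * SEGSIZE + 1)))
    print(suspects)   # the four offsets around each of the two segment boundaries
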
@@ -3593,14 +3596,14 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
     def test_replace_locations_max_shares(self):
         # exercise fencepost conditions
         SEGSIZE = 128*1024
-        suspects = range(SEGSIZE-3, SEGSIZE+1)+range(2*SEGSIZE-3, 2*SEGSIZE+1)
+        suspects = list(range(SEGSIZE-3, SEGSIZE+1))+list(range(2*SEGSIZE-3, 2*SEGSIZE+1))
         letters = iter("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
         d0 = self.do_upload_mdmf()
         def _run(ign):
             expected = self.data
             d = defer.succeed(None)
             for offset in suspects:
-                new_data = letters.next()*2 # "AA", then "BB", etc
+                new_data = six.advance_iterator(letters)*2 # "AA", then "BB", etc
                 expected = expected[:offset]+new_data+expected[offset+2:]
                 d.addCallback(lambda ign:
                               self.mdmf_max_shares_node.get_best_mutable_version())
index 72d6ef8ce00c6956c8127a98f1cdcb18565a8455..c85135f5c402b440b07e8b4180d0bc0f557afe7c 100644 (file)
@@ -145,7 +145,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
         privdir = os.path.join(basedir, "private")
         st = os.stat(privdir)
         bits = stat.S_IMODE(st[stat.ST_MODE])
-        self.failUnless(bits & 0001 == 0, bits)
+        self.failUnless(bits & 0o001 == 0, bits)
 
     @patch("foolscap.logging.log.setLogDir")
     def test_logdir_is_str(self, mock_setLogDir):
index 0feaacb12162c369eb2f205da44c6dee34f74d1e..77a6178d216d21521ea958bea1e57cfcc70a8556 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # -*- coding: utf-8 -*-
 from allmydata.test import common
 from allmydata.monitor import Monitor
@@ -115,7 +116,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
             self.failIfBigger(delta_reads, MAX_DELTA_READS)
             try:
                 judgement(vr)
-            except unittest.FailTest, e:
+            except unittest.FailTest as e:
                 # FailTest just uses e.args[0] == str
                 new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
                 e.args = (new_arg,)
@@ -348,9 +349,9 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         def _show_results(ign):
             f = open("test_each_byte_output", "w")
             for i in sorted(results.keys()):
-                print >>f, "%d: %s" % (i, results[i])
+                print("%d: %s" % (i, results[i]), file=f)
             f.close()
-            print "Please look in _trial_temp/test_each_byte_output for results"
+            print("Please look in _trial_temp/test_each_byte_output for results")
         d.addCallback(_show_results)
         return d
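
The "print >>f" redirection statement has no Python 3 equivalent; under the print_function future import it becomes the file= keyword argument, which Python 2.6+ also accepts. A minimal sketch writing hypothetical results:

    from __future__ import print_function

    with open("test_each_byte_output", "w") as f:
        for i, verdict in [(0, "healthy"), (1, "corrupt")]:
            print("%d: %s" % (i, verdict), file=f)   # was: print >>f, "%d: %s" % (i, verdict)
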
 
@@ -462,7 +463,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
         # previously-deleted share #2.
 
         d.addCallback(lambda ignored:
-                      self.delete_shares_numbered(self.uri, range(3, 10+1)))
+                      self.delete_shares_numbered(self.uri, list(range(3, 10+1))))
         d.addCallback(lambda ignored: download_to_data(self.c1_filenode))
         d.addCallback(lambda newdata:
                       self.failUnlessEqual(newdata, common.TEST_DATA))
@@ -474,7 +475,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
         self.set_up_grid(num_clients=2)
         d = self.upload_and_stash()
         d.addCallback(lambda ignored:
-                      self.delete_shares_numbered(self.uri, range(7)))
+                      self.delete_shares_numbered(self.uri, list(range(7))))
         d.addCallback(lambda ignored: self._stash_counts())
         d.addCallback(lambda ignored:
                       self.c0_filenode.check_and_repair(Monitor(),
@@ -507,7 +508,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
         # previously-deleted share #2.
 
         d.addCallback(lambda ignored:
-                      self.delete_shares_numbered(self.uri, range(3, 10+1)))
+                      self.delete_shares_numbered(self.uri, list(range(3, 10+1))))
         d.addCallback(lambda ignored: download_to_data(self.c1_filenode))
         d.addCallback(lambda newdata:
                       self.failUnlessEqual(newdata, common.TEST_DATA))
@@ -525,7 +526,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
         # distributing the shares widely enough to satisfy the default
         # happiness setting.
         def _delete_some_servers(ignored):
-            for i in xrange(7):
+            for i in range(7):
                 self.g.remove_server(self.g.servers_by_number[i].my_nodeid)
 
             assert len(self.g.servers_by_number) == 3
@@ -638,7 +639,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
                 # downloading and has the right contents. This can't work
                 # unless it has already repaired the previously-corrupted share.
                 def _then_delete_7_and_try_a_download(unused=None):
-                    shnums = range(10)
+                    shnums = list(range(10))
                     shnums.remove(shnum)
                     random.shuffle(shnums)
                     for sharenum in shnums[:7]:
index b5ccaa568639a6573da3ed48fc3c1a2574026feb..624aba7f1a810fb2b77207b54868917fdb560105 100644 (file)
@@ -96,7 +96,7 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin):
         if not same:
             try:
                 same = os.path.samefile(root_from_cwd, root_to_check)
-            except AttributeError, e:
+            except AttributeError as e:
                 e  # hush pyflakes
 
         if not same:
index f24a1ae8c826657a58792d0d8ab82da62962ce0b..860c52e12922154561b19f567613ba1e7251d27d 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import re, struct, traceback, time, calendar
 from stat import S_IFREG, S_IFDIR
@@ -54,7 +55,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
                                            "%s was supposed to raise SFTPError(%r), not SFTPError(%r): %s" %
                                            (which, expected_code, res.value.code, res))
             else:
-                print '@' + '@'.join(s)
+                print('@' + '@'.join(s))
                 self.fail("%s was supposed to raise SFTPError(%r), not get %r" %
                           (which, expected_code, res))
         d.addBoth(_done)
@@ -291,16 +292,16 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
 
         gross = u"gro\u00DF".encode("utf-8")
         expected_root = [
-            ('empty_lit_dir', r'dr-xr-xr-x .* 0 .* empty_lit_dir$',       {'permissions': S_IFDIR | 0555}),
-            (gross,           r'-rw-rw-rw- .* 1010 .* '+gross+'$',        {'permissions': S_IFREG | 0666, 'size': 1010}),
+            ('empty_lit_dir', r'dr-xr-xr-x .* 0 .* empty_lit_dir$',       {'permissions': S_IFDIR | 0o555}),
+            (gross,           r'-rw-rw-rw- .* 1010 .* '+gross+'$',        {'permissions': S_IFREG | 0o666, 'size': 1010}),
             # The fall of the Berlin wall may have been on 9th or 10th November 1989 depending on the gateway's timezone.
             #('loop',          r'drwxrwxrwx .* 0 Nov (09|10)  1989 loop$', {'permissions': S_IFDIR | 0777}),
-            ('loop',          r'drwxrwxrwx .* 0 .* loop$',                {'permissions': S_IFDIR | 0777}),
-            ('mutable',       r'-rw-rw-rw- .* 0 .* mutable$',             {'permissions': S_IFREG | 0666}),
-            ('readonly',      r'-r--r--r-- .* 0 .* readonly$',            {'permissions': S_IFREG | 0444}),
-            ('small',         r'-rw-rw-rw- .* 10 .* small$',              {'permissions': S_IFREG | 0666, 'size': 10}),
-            ('small2',        r'-rw-rw-rw- .* 26 .* small2$',             {'permissions': S_IFREG | 0666, 'size': 26}),
-            ('tiny_lit_dir',  r'dr-xr-xr-x .* 0 .* tiny_lit_dir$',        {'permissions': S_IFDIR | 0555}),
+            ('loop',          r'drwxrwxrwx .* 0 .* loop$',                {'permissions': S_IFDIR | 0o777}),
+            ('mutable',       r'-rw-rw-rw- .* 0 .* mutable$',             {'permissions': S_IFREG | 0o666}),
+            ('readonly',      r'-r--r--r-- .* 0 .* readonly$',            {'permissions': S_IFREG | 0o444}),
+            ('small',         r'-rw-rw-rw- .* 10 .* small$',              {'permissions': S_IFREG | 0o666, 'size': 10}),
+            ('small2',        r'-rw-rw-rw- .* 26 .* small2$',             {'permissions': S_IFREG | 0o666, 'size': 26}),
+            ('tiny_lit_dir',  r'dr-xr-xr-x .* 0 .* tiny_lit_dir$',        {'permissions': S_IFDIR | 0o555}),
             ('unknown',       r'\?--------- .* 0 .* unknown$',            {'permissions': 0}),
         ]
 
@@ -318,20 +319,20 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
 
         # The UTC epoch may either be in Jan 1 1970 or Dec 31 1969 depending on the gateway's timezone.
         expected_tiny_lit = [
-            ('short', r'-r--r--r-- .* 8 (Jan 01  1970|Dec 31  1969) short$', {'permissions': S_IFREG | 0444, 'size': 8}),
+            ('short', r'-r--r--r-- .* 8 (Jan 01  1970|Dec 31  1969) short$', {'permissions': S_IFREG | 0o444, 'size': 8}),
         ]
 
         d.addCallback(lambda ign: self.handler.openDirectory("tiny_lit_dir"))
         d.addCallback(lambda res: self._compareDirLists(res, expected_tiny_lit))
 
         d.addCallback(lambda ign: self.handler.getAttrs("small", True))
-        d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 10}))
+        d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10}))
 
         d.addCallback(lambda ign: self.handler.setAttrs("small", {}))
         d.addCallback(lambda res: self.failUnlessReallyEqual(res, None))
 
         d.addCallback(lambda ign: self.handler.getAttrs("small", True))
-        d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 10}))
+        d.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10}))
 
         d.addCallback(lambda ign:
             self.shouldFailWithSFTPError(sftp.FX_OP_UNSUPPORTED, "setAttrs size",
@@ -406,10 +407,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
                                              rf.readChunk, 11, 1))
 
             d2.addCallback(lambda ign: rf.getAttrs())
-            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 10}))
+            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10}))
 
             d2.addCallback(lambda ign: self.handler.getAttrs("small", followLinks=0))
-            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 10}))
+            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 10}))
 
             d2.addCallback(lambda ign:
                 self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied",
@@ -458,10 +459,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
                                              rf.readChunk, 1011, 1))
 
             d2.addCallback(lambda ign: rf.getAttrs())
-            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 1010}))
+            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 1010}))
 
             d2.addCallback(lambda ign: self.handler.getAttrs(gross, followLinks=0))
-            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 1010}))
+            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 1010}))
 
             d2.addCallback(lambda ign:
                 self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "writeChunk on read-only handle denied",
@@ -667,10 +668,10 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
             d2.addCallback(lambda ign: wf.writeChunk(13, "abc"))
 
             d2.addCallback(lambda ign: wf.getAttrs())
-            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 16}))
+            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 16}))
 
             d2.addCallback(lambda ign: self.handler.getAttrs("newfile", followLinks=0))
-            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0666, 'size': 16}))
+            d2.addCallback(lambda attrs: self._compareAttributes(attrs, {'permissions': S_IFREG | 0o666, 'size': 16}))
 
             d2.addCallback(lambda ign: wf.setAttrs({}))
 
@@ -901,15 +902,15 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
         def _write_mutable_setattr(wf):
             d2 = wf.writeChunk(8, "read-only link from parent")
 
-            d2.addCallback(lambda ign: self.handler.setAttrs("mutable", {'permissions': 0444}))
+            d2.addCallback(lambda ign: self.handler.setAttrs("mutable", {'permissions': 0o444}))
 
             d2.addCallback(lambda ign: self.root.get(u"mutable"))
             d2.addCallback(lambda node: self.failUnless(node.is_readonly()))
 
             d2.addCallback(lambda ign: wf.getAttrs())
-            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0666))
+            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o666))
             d2.addCallback(lambda ign: self.handler.getAttrs("mutable", followLinks=0))
-            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0444))
+            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o444))
 
             d2.addCallback(lambda ign: wf.close())
             return d2
@@ -930,16 +931,16 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
         def _write_mutable2_setattr(wf):
             d2 = wf.writeChunk(7, "2")
 
-            d2.addCallback(lambda ign: wf.setAttrs({'permissions': 0444, 'size': 8}))
+            d2.addCallback(lambda ign: wf.setAttrs({'permissions': 0o444, 'size': 8}))
 
             # The link isn't made read-only until the file is closed.
             d2.addCallback(lambda ign: self.root.get(u"mutable2"))
             d2.addCallback(lambda node: self.failIf(node.is_readonly()))
 
             d2.addCallback(lambda ign: wf.getAttrs())
-            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0444))
+            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o444))
             d2.addCallback(lambda ign: self.handler.getAttrs("mutable2", followLinks=0))
-            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0666))
+            d2.addCallback(lambda attrs: self.failUnlessReallyEqual(attrs['permissions'], S_IFREG | 0o666))
 
             d2.addCallback(lambda ign: wf.close())
             return d2
@@ -1346,7 +1347,8 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
         d.addCallback(lambda ign: self.handler.makeDirectory("newdir", {'ext_foo': 'bar', 'ctime': 42}))
 
         d.addCallback(lambda ign: self.root.get_child_and_metadata(u"newdir"))
-        def _got( (child, metadata) ):
+        def _got(xxx_todo_changeme):
+            (child, metadata) = xxx_todo_changeme
             self.failUnless(IDirectoryNode.providedBy(child))
             self.failUnless(child.is_mutable())
             # FIXME
@@ -1385,7 +1387,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
         d.addCallback(lambda ign:
             self.shouldFailWithSFTPError(sftp.FX_PERMISSION_DENIED, "makeDirectory newdir2 permissions:0444 denied",
                                          self.handler.makeDirectory, "newdir2",
-                                         {'permissions': 0444}))
+                                         {'permissions': 0o444}))
 
         d.addCallback(lambda ign: self.failUnlessEqual(sftpd.all_heisenfiles, {}))
         d.addCallback(lambda ign: self.failUnlessEqual(self.handler._heisenfiles, {}))
index 5c5e2c7c02bae554f3a818885bcd2eab2e2d9a80..6f3179fd0b5714ea8b59f2488f58c9dd40c53f57 100644 (file)
@@ -33,6 +33,7 @@ from allmydata.test.common import LoggingServiceParent, ShouldFailMixin
 from allmydata.test.common_web import WebRenderingMixin
 from allmydata.test.no_network import NoNetworkServer
 from allmydata.web.storage import StorageStatus, remove_prefix
+import six
 
 class Marker:
     pass
@@ -333,8 +334,8 @@ class Server(unittest.TestCase):
         self.failUnlessIn('maximum-mutable-share-size', sv1)
 
     def allocate(self, ss, storage_index, sharenums, size, canary=None):
-        renew_secret = hashutil.tagged_hash("blah", "%d" % self._lease_secret.next())
-        cancel_secret = hashutil.tagged_hash("blah", "%d" % self._lease_secret.next())
+        renew_secret = hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret))
+        cancel_secret = hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret))
         if not canary:
             canary = FakeCanary()
         return ss.remote_allocate_buckets(storage_index,
@@ -393,7 +394,7 @@ class Server(unittest.TestCase):
 
     def test_remove_incoming(self):
         ss = self.create("test_remove_incoming")
-        already, writers = self.allocate(ss, "vid", range(3), 10)
+        already, writers = self.allocate(ss, "vid", list(range(3)), 10)
         for i,wb in writers.items():
             wb.remote_write(0, "%10d" % i)
             wb.remote_close()
@@ -414,7 +415,7 @@ class Server(unittest.TestCase):
         self.failIfEqual(ss.allocated_size(), 0)
 
         # Now abort the writers.
-        for writer in writers.itervalues():
+        for writer in six.itervalues(writers):
             writer.remote_abort()
         self.failUnlessEqual(ss.allocated_size(), 0)
 
@@ -563,7 +564,7 @@ class Server(unittest.TestCase):
 
         # now there should be ALLOCATED=1001+12+72=1085 bytes allocated, and
         # 5000-1085=3915 free, therefore we can fit 39 100byte shares
-        already3,writers3 = self.allocate(ss,"vid3", range(100), 100, canary)
+        already3,writers3 = self.allocate(ss,"vid3", list(range(100)), 100, canary)
         self.failUnlessEqual(len(writers3), 39)
         self.failUnlessEqual(len(ss._active_writers), 39)
 
@@ -596,11 +597,11 @@ class Server(unittest.TestCase):
     def test_leases(self):
         ss = self.create("test_leases")
         canary = FakeCanary()
-        sharenums = range(5)
+        sharenums = list(range(5))
         size = 100
 
-        rs0,cs0 = (hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()),
-                   hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()))
+        rs0,cs0 = (hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)),
+                   hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)))
         already,writers = ss.remote_allocate_buckets("si0", rs0, cs0,
                                                      sharenums, size, canary)
         self.failUnlessEqual(len(already), 0)
@@ -612,16 +613,16 @@ class Server(unittest.TestCase):
         self.failUnlessEqual(len(leases), 1)
         self.failUnlessEqual(set([l.renew_secret for l in leases]), set([rs0]))
 
-        rs1,cs1 = (hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()),
-                   hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()))
+        rs1,cs1 = (hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)),
+                   hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)))
         already,writers = ss.remote_allocate_buckets("si1", rs1, cs1,
                                                      sharenums, size, canary)
         for wb in writers.values():
             wb.remote_close()
 
         # take out a second lease on si1
-        rs2,cs2 = (hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()),
-                   hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()))
+        rs2,cs2 = (hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)),
+                   hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)))
         already,writers = ss.remote_allocate_buckets("si1", rs2, cs2,
                                                      sharenums, size, canary)
         self.failUnlessEqual(len(already), 5)
@@ -632,8 +633,8 @@ class Server(unittest.TestCase):
         self.failUnlessEqual(set([l.renew_secret for l in leases]), set([rs1, rs2]))
 
         # and a third lease, using add-lease
-        rs2a,cs2a = (hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()),
-                     hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()))
+        rs2a,cs2a = (hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)),
+                     hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)))
         ss.remote_add_lease("si1", rs2a, cs2a)
         leases = list(ss.get_leases("si1"))
         self.failUnlessEqual(len(leases), 3)
@@ -661,10 +662,10 @@ class Server(unittest.TestCase):
                         "ss should not have a 'remote_cancel_lease' method/attribute")
 
         # test overlapping uploads
-        rs3,cs3 = (hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()),
-                   hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()))
-        rs4,cs4 = (hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()),
-                   hashutil.tagged_hash("blah", "%d" % self._lease_secret.next()))
+        rs3,cs3 = (hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)),
+                   hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)))
+        rs4,cs4 = (hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)),
+                   hashutil.tagged_hash("blah", "%d" % six.advance_iterator(self._lease_secret)))
         already,writers = ss.remote_allocate_buckets("si3", rs3, cs3,
                                                      sharenums, size, canary)
         self.failUnlessEqual(len(already), 0)
@@ -817,7 +818,7 @@ class MutableServer(unittest.TestCase):
 
     def test_bad_magic(self):
         ss = self.create("test_bad_magic")
-        self.allocate(ss, "si1", "we1", self._lease_secret.next(), set([0]), 10)
+        self.allocate(ss, "si1", "we1", six.advance_iterator(self._lease_secret), set([0]), 10)
         fn = os.path.join(ss.sharedir, storage_index_to_dir("si1"), "0")
         f = open(fn, "rb+")
         f.seek(0)
@@ -831,7 +832,7 @@ class MutableServer(unittest.TestCase):
 
     def test_container_size(self):
         ss = self.create("test_container_size")
-        self.allocate(ss, "si1", "we1", self._lease_secret.next(),
+        self.allocate(ss, "si1", "we1", six.advance_iterator(self._lease_secret),
                       set([0,1,2]), 100)
         read = ss.remote_slot_readv
         rstaraw = ss.remote_slot_testv_and_readv_and_writev
@@ -929,7 +930,7 @@ class MutableServer(unittest.TestCase):
 
     def test_allocate(self):
         ss = self.create("test_allocate")
-        self.allocate(ss, "si1", "we1", self._lease_secret.next(),
+        self.allocate(ss, "si1", "we1", six.advance_iterator(self._lease_secret),
                       set([0,1,2]), 100)
 
         read = ss.remote_slot_readv
@@ -1315,7 +1316,7 @@ class MutableServer(unittest.TestCase):
 
     def test_remove(self):
         ss = self.create("test_remove")
-        self.allocate(ss, "si1", "we1", self._lease_secret.next(),
+        self.allocate(ss, "si1", "we1", six.advance_iterator(self._lease_secret),
                       set([0,1,2]), 100)
         readv = ss.remote_slot_readv
         writev = ss.remote_slot_testv_and_readv_and_writev
@@ -1373,15 +1374,15 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         self.block = "aa"
         self.salt = "a" * 16
         self.block_hash = "a" * 32
-        self.block_hash_tree = [self.block_hash for i in xrange(6)]
+        self.block_hash_tree = [self.block_hash for i in range(6)]
         self.share_hash = self.block_hash
-        self.share_hash_chain = dict([(i, self.share_hash) for i in xrange(6)])
+        self.share_hash_chain = dict([(i, self.share_hash) for i in range(6)])
         self.signature = "foobarbaz"
         self.verification_key = "vvvvvv"
         self.encprivkey = "private"
         self.root_hash = self.block_hash
         self.salt_hash = self.root_hash
-        self.salt_hash_tree = [self.salt_hash for i in xrange(6)]
+        self.salt_hash_tree = [self.salt_hash for i in range(6)]
         self.block_hash_tree_s = self.serialize_blockhashes(self.block_hash_tree)
         self.share_hash_chain_s = self.serialize_sharehashes(self.share_hash_chain)
         # blockhashes and salt hashes are serialized in the same way,
@@ -1448,10 +1449,10 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         # Now we'll build the offsets.
         sharedata = ""
         if not tail_segment and not empty:
-            for i in xrange(6):
+            for i in range(6):
                 sharedata += self.salt + self.block
         elif tail_segment:
-            for i in xrange(5):
+            for i in range(5):
                 sharedata += self.salt + self.block
             sharedata += self.salt + "a"
 
@@ -1507,7 +1508,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         # and the verification key
         data += self.verification_key
         # Then we'll add in gibberish until we get to the right point.
-        nulls = "".join([" " for i in xrange(len(data), share_data_offset)])
+        nulls = "".join([" " for i in range(len(data), share_data_offset)])
         data += nulls
 
         # Then the share data
@@ -1611,11 +1612,12 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         mr = MDMFSlotReadProxy(self.rref, "si1", 0)
         # Check that every method equals what we expect it to.
         d = defer.succeed(None)
-        def _check_block_and_salt((block, salt)):
+        def _check_block_and_salt(xxx_todo_changeme):
+            (block, salt) = xxx_todo_changeme
             self.failUnlessEqual(block, self.block)
             self.failUnlessEqual(salt, self.salt)
 
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mr.get_block_and_salt(i))
             d.addCallback(_check_block_and_salt)
@@ -1662,7 +1664,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
 
         d.addCallback(lambda ignored:
             mr.get_encoding_parameters())
-        def _check_encoding_parameters((k, n, segsize, datalen)):
+        def _check_encoding_parameters(xxx_todo_changeme1):
+            (k, n, segsize, datalen) = xxx_todo_changeme1
             self.failUnlessEqual(k, 3)
             self.failUnlessEqual(n, 10)
             self.failUnlessEqual(segsize, 6)
@@ -1703,7 +1706,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         self.write_test_share_to_server("si1")
         mr = MDMFSlotReadProxy(self.rref, "si1", 0)
         d = mr.get_encoding_parameters()
-        def _check_encoding_parameters((k, n, segment_size, datalen)):
+        def _check_encoding_parameters(xxx_todo_changeme2):
+            (k, n, segment_size, datalen) = xxx_todo_changeme2
             self.failUnlessEqual(k, 3)
             self.failUnlessEqual(n, 10)
             self.failUnlessEqual(segment_size, 6)
@@ -1747,7 +1751,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         # is working appropriately.
         mw = self._make_new_mw("si1", 0)
 
-        for i in xrange(6):
+        for i in range(6):
             mw.put_block(self.block, i, self.salt)
         mw.put_encprivkey(self.encprivkey)
         mw.put_blockhashes(self.block_hash_tree)
@@ -1781,7 +1785,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
     def test_private_key_after_share_hash_chain(self):
         mw = self._make_new_mw("si1", 0)
         d = defer.succeed(None)
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mw.put_block(self.block, i, self.salt))
         d.addCallback(lambda ignored:
@@ -1801,7 +1805,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         mw = self._make_new_mw("si1", 0)
         d = defer.succeed(None)
         # Put everything up to and including the verification key.
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mw.put_block(self.block, i, self.salt))
         d.addCallback(lambda ignored:
@@ -1840,7 +1844,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
             self.failIf(result)
 
         def _write_share(mw):
-            for i in xrange(6):
+            for i in range(6):
                 mw.put_block(self.block, i, self.salt)
             mw.put_encprivkey(self.encprivkey)
             mw.put_blockhashes(self.block_hash_tree)
@@ -1892,7 +1896,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
 
         mw = self._make_new_mw("si1", 0)
         mw.set_checkstring("this is a lie")
-        for i in xrange(6):
+        for i in range(6):
             mw.put_block(self.block, i, self.salt)
         mw.put_encprivkey(self.encprivkey)
         mw.put_blockhashes(self.block_hash_tree)
@@ -1934,7 +1938,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
                                     SHARE_HASH_CHAIN_SIZE
         written_block_size = 2 + len(self.salt)
         written_block = self.block + self.salt
-        for i in xrange(6):
+        for i in range(6):
             mw.put_block(self.block, i, self.salt)
 
         mw.put_encprivkey(self.encprivkey)
@@ -1948,7 +1952,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
             self.failUnlessEqual(len(results), 2)
             result, ign = results
             self.failUnless(result, "publish failed")
-            for i in xrange(6):
+            for i in range(6):
                 self.failUnlessEqual(read("si1", [0], [(expected_sharedata_offset + (i * written_block_size), written_block_size)]),
                                 {0: [written_block]})
 
@@ -2059,7 +2063,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         # more than 6
         # blocks into each share.
         d = defer.succeed(None)
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mw.put_block(self.block, i, self.salt))
         d.addCallback(lambda ignored:
@@ -2092,7 +2096,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         # a block hash tree, and a share hash tree. Otherwise, we'll see
         # failures that match what we are looking for, but are caused by
         # the constraints imposed on operation ordering.
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mw.put_block(self.block, i, self.salt))
         d.addCallback(lambda ignored:
@@ -2127,7 +2131,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
             self.shouldFail(LayoutInvalid, "test blocksize too large",
                             None,
                             mw.put_block, invalid_block, 0, self.salt))
-        for i in xrange(5):
+        for i in range(5):
             d.addCallback(lambda ignored, i=i:
                 mw.put_block(self.block, i, self.salt))
         # Try to put an invalid tail segment
@@ -2163,7 +2167,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         mw0 = self._make_new_mw("si0", 0)
         # Write some shares
         d = defer.succeed(None)
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mw0.put_block(self.block, i, self.salt))
 
@@ -2232,7 +2236,7 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         # Write a share using the mutable writer, and make sure that the
         # reader knows how to read everything back to us.
         d = defer.succeed(None)
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mw.put_block(self.block, i, self.salt))
         d.addCallback(lambda ignored:
@@ -2251,11 +2255,12 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
             mw.finish_publishing())
 
         mr = MDMFSlotReadProxy(self.rref, "si1", 0)
-        def _check_block_and_salt((block, salt)):
+        def _check_block_and_salt(block_and_salt):
+            (block, salt) = block_and_salt
             self.failUnlessEqual(block, self.block)
             self.failUnlessEqual(salt, self.salt)
 
-        for i in xrange(6):
+        for i in range(6):
             d.addCallback(lambda ignored, i=i:
                 mr.get_block_and_salt(i))
             d.addCallback(_check_block_and_salt)
@@ -2297,7 +2302,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
 
         d.addCallback(lambda ignored:
             mr.get_encoding_parameters())
-        def _check_encoding_parameters((k, n, segsize, datalen)):
+        def _check_encoding_parameters(params):
+            (k, n, segsize, datalen) = params
             self.failUnlessEqual(k, 3)
             self.failUnlessEqual(n, 10)
             self.failUnlessEqual(segsize, 6)
@@ -2464,7 +2470,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         d.addCallback(_make_mr, 123)
         d.addCallback(lambda mr:
             mr.get_block_and_salt(0))
-        def _check_block_and_salt((block, salt)):
+        def _check_block_and_salt(block_and_salt):
+            (block, salt) = block_and_salt
             self.failUnlessEqual(block, self.block)
             self.failUnlessEqual(salt, self.salt)
             self.failUnlessEqual(self.rref.read_count, 1)
@@ -2525,7 +2532,8 @@ class MDMFProxies(unittest.TestCase, ShouldFailMixin):
         d.addCallback(_make_mr, 123)
         d.addCallback(lambda mr:
             mr.get_block_and_salt(0))
-        def _check_block_and_salt((block, salt)):
+        def _check_block_and_salt(block_and_salt):
+            (block, salt) = block_and_salt
             self.failUnlessEqual(block, self.block * 6)
             self.failUnlessEqual(salt, self.salt)
             # TODO: Fix the read routine so that it reads only the data
index 56abc24ed04fd5c3e502f07a8cbe04231417f1eb..a84e0e5dcbec508fc22a28c887404f9eb9e77ab4 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import os, re, sys, time, simplejson
 from cStringIO import StringIO
@@ -37,6 +38,7 @@ from allmydata.test.common import SystemTestMixin
 
 # TODO: move this to common or common_util
 from allmydata.test.test_runner import RunBinTahoeMixin
+import six
 
 LARGE_DATA = """
 This is some data to publish to the remote grid.., which needs to be large
@@ -528,9 +530,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
                             base32.b2a(storage_index))
                 self.failUnless(expected in output)
             except unittest.FailTest:
-                print
-                print "dump-share output was:"
-                print output
+                print()
+                print("dump-share output was:")
+                print(output)
                 raise
         d.addCallback(_test_debug)
 
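
# The print statement is gone in Python 3. With the future import at the top
# of the file, print() is a function on Python 2 as well, so the converted
# calls above run unchanged on both interpreters. Sketch:
from __future__ import print_function
print()                              # bare newline; was:  print
print("dump-share output was:")      # was:  print "dump-share output was:"
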
@@ -565,7 +567,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             return d1
         d.addCallback(_check_download_2)
 
-        def _check_download_3((res, newnode)):
+        def _check_download_3(res_and_newnode):
+            (res, newnode) = res_and_newnode
             self.failUnlessEqual(res, DATA)
             # replace the data
             log.msg("starting replace1")
@@ -794,9 +797,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
                 self.failUnless("Subscription Summary: storage: 5" in res)
                 self.failUnless("tahoe.css" in res)
             except unittest.FailTest:
-                print
-                print "GET %s output was:" % self.introweb_url
-                print res
+                print()
+                print("GET %s output was:" % self.introweb_url)
+                print(res)
                 raise
         d.addCallback(_check)
         # make sure it serves the CSS too
@@ -815,9 +818,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
                 self.failUnlessEqual(data["announcement_distinct_hosts"],
                                      {"storage": 1})
             except unittest.FailTest:
-                print
-                print "GET %s?t=json output was:" % self.introweb_url
-                print res
+                print()
+                print("GET %s?t=json output was:" % self.introweb_url)
+                print(res)
                 raise
         d.addCallback(_check_json)
         return d
@@ -1006,7 +1009,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
                             "largest-directory-children": 3,
                             "largest-immutable-file": 112,
                             }
-                for k,v in expected.iteritems():
+                for k,v in six.iteritems(expected):
                     self.failUnlessEqual(stats[k], v,
                                          "stats[%s] was %s, not %s" %
                                          (k, stats[k], v))
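
# dict.iteritems() no longer exists in Python 3. six.iteritems(d) dispatches
# to d.iteritems() on Python 2 and d.items() on Python 3, keeping a single
# spelling for both. Illustrative sketch (keys made up, not from the test):
import six
expected = {"count-files": 4, "count-directories": 3}
for k, v in six.iteritems(expected):   # was:  expected.iteritems()
    assert expected[k] == v
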
@@ -1065,7 +1068,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         form.append('')
         form.append('UTF-8')
         form.append(sep)
-        for name, value in fields.iteritems():
+        for name, value in six.iteritems(fields):
             if isinstance(value, tuple):
                 filename, value = value
                 form.append('Content-Disposition: form-data; name="%s"; '
@@ -1444,14 +1447,16 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             newargs = nodeargs + [verb] + list(args)
             return self._run_cli(newargs, stdin=stdin)
 
-        def _check_ls((out,err), expected_children, unexpected_children=[]):
+        def _check_ls(out_and_err, expected_children, unexpected_children=[]):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             for s in expected_children:
                 self.failUnless(s in out, (s,out))
             for s in unexpected_children:
                 self.failIf(s in out, (s,out))
 
-        def _check_ls_root((out,err)):
+        def _check_ls_root(out_and_err):
+            (out,err) = out_and_err
             self.failUnless("personal" in out)
             self.failUnless("s2-ro" in out)
             self.failUnless("s2-rw" in out)
@@ -1462,7 +1467,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_check_ls, ["personal", "s2-ro", "s2-rw"])
 
         d.addCallback(run, "list-aliases")
-        def _check_aliases_1((out,err)):
+        def _check_aliases_1(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             self.failUnlessEqual(out.strip(" \n"), "tahoe: %s" % private_uri)
         d.addCallback(_check_aliases_1)
@@ -1471,32 +1477,37 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # new files
         d.addCallback(lambda res: os.unlink(root_file))
         d.addCallback(run, "list-aliases")
-        def _check_aliases_2((out,err)):
+        def _check_aliases_2(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             self.failUnlessEqual(out, "")
         d.addCallback(_check_aliases_2)
 
         d.addCallback(run, "mkdir")
-        def _got_dir( (out,err) ):
+        def _got_dir(out_and_err):
+            (out,err) = out_and_err
             self.failUnless(uri.from_string_dirnode(out.strip()))
             return out.strip()
         d.addCallback(_got_dir)
         d.addCallback(lambda newcap: run(None, "add-alias", "tahoe", newcap))
 
         d.addCallback(run, "list-aliases")
-        def _check_aliases_3((out,err)):
+        def _check_aliases_3(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             self.failUnless("tahoe: " in out)
         d.addCallback(_check_aliases_3)
 
-        def _check_empty_dir((out,err)):
+        def _check_empty_dir(out_and_err):
+            (out,err) = out_and_err
             self.failUnlessEqual(out, "")
             self.failUnlessEqual(err, "")
         d.addCallback(run, "ls")
         d.addCallback(_check_empty_dir)
 
-        def _check_missing_dir((out,err)):
+        def _check_missing_dir(out_and_err):
             # TODO: check that rc==2
+            (out,err) = out_and_err
             self.failUnlessEqual(out, "")
             self.failUnlessEqual(err, "No such file or directory\n")
         d.addCallback(run, "ls", "bogus")
@@ -1511,7 +1522,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             datas.append(data)
             open(fn,"wb").write(data)
 
-        def _check_stdout_against((out,err), filenum=None, data=None):
+        def _check_stdout_against(out_and_err, filenum=None, data=None):
+            (out,err) = out_and_err
             self.failUnlessEqual(err, "")
             if filenum is not None:
                 self.failUnlessEqual(out, datas[filenum])
@@ -1521,19 +1533,21 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # test all both forms of put: from a file, and from stdin
         #  tahoe put bar FOO
         d.addCallback(run, "put", files[0], "tahoe-file0")
-        def _put_out((out,err)):
+        def _put_out(out_and_err):
+            (out,err) = out_and_err
             self.failUnless("URI:LIT:" in out, out)
             self.failUnless("201 Created" in err, err)
             uri0 = out.strip()
             return run(None, "get", uri0)
         d.addCallback(_put_out)
-        d.addCallback(lambda (out,err): self.failUnlessEqual(out, datas[0]))
+        d.addCallback(lambda out_err: self.failUnlessEqual(out_err[0], datas[0]))
 
         d.addCallback(run, "put", files[1], "subdir/tahoe-file1")
         #  tahoe put bar tahoe:FOO
         d.addCallback(run, "put", files[2], "tahoe:file2")
         d.addCallback(run, "put", "--format=SDMF", files[3], "tahoe:file3")
-        def _check_put_mutable((out,err)):
+        def _check_put_mutable(out_and_err):
+            (out,err) = out_and_err
             self._mutable_file3_uri = out.strip()
         d.addCallback(_check_put_mutable)
         d.addCallback(run, "get", "tahoe:file3")
@@ -1565,13 +1579,15 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_check_stdout_against, 1)
         outfile0 = os.path.join(self.basedir, "outfile0")
         d.addCallback(run, "get", "file2", outfile0)
-        def _check_outfile0((out,err)):
+        def _check_outfile0(out_and_err):
+            (out,err) = out_and_err
             data = open(outfile0,"rb").read()
             self.failUnlessEqual(data, "data to be uploaded: file2\n")
         d.addCallback(_check_outfile0)
         outfile1 = os.path.join(self.basedir, "outfile0")
         d.addCallback(run, "get", "tahoe:subdir/tahoe-file1", outfile1)
-        def _check_outfile1((out,err)):
+        def _check_outfile1(out_and_err):
+            (out,err) = out_and_err
             data = open(outfile1,"rb").read()
             self.failUnlessEqual(data, "data to be uploaded: file1\n")
         d.addCallback(_check_outfile1)
@@ -1582,7 +1598,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_check_ls, [], ["tahoe-file0", "file2"])
 
         d.addCallback(run, "ls", "-l")
-        def _check_ls_l((out,err)):
+        def _check_ls_l(out_and_err):
+            (out,err) = out_and_err
             lines = out.split("\n")
             for l in lines:
                 if "tahoe-file-stdin" in l:
@@ -1593,7 +1610,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_check_ls_l)
 
         d.addCallback(run, "ls", "--uri")
-        def _check_ls_uri((out,err)):
+        def _check_ls_uri(out_and_err):
+            (out,err) = out_and_err
             lines = out.split("\n")
             for l in lines:
                 if "file3" in l:
@@ -1601,7 +1619,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_check_ls_uri)
 
         d.addCallback(run, "ls", "--readonly-uri")
-        def _check_ls_rouri((out,err)):
+        def _check_ls_rouri(out_and_err):
+            (out,err) = out_and_err
             lines = out.split("\n")
             for l in lines:
                 if "file3" in l:
@@ -1636,7 +1655,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # copy from tahoe into disk
         target_filename = os.path.join(self.basedir, "file-out")
         d.addCallback(run, "cp", "tahoe:file4", target_filename)
-        def _check_cp_out((out,err)):
+        def _check_cp_out(out_and_err):
+            (out,err) = out_and_err
             self.failUnless(os.path.exists(target_filename))
             got = open(target_filename,"rb").read()
             self.failUnlessEqual(got, datas[4])
@@ -1645,7 +1665,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # copy from disk to disk (silly case)
         target2_filename = os.path.join(self.basedir, "file-out-copy")
         d.addCallback(run, "cp", target_filename, target2_filename)
-        def _check_cp_out2((out,err)):
+        def _check_cp_out2(out_and_err):
+            (out,err) = out_and_err
             self.failUnless(os.path.exists(target2_filename))
             got = open(target2_filename,"rb").read()
             self.failUnlessEqual(got, datas[4])
@@ -1653,7 +1674,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 
         # copy from tahoe into disk, overwriting an existing file
         d.addCallback(run, "cp", "tahoe:file3", target_filename)
-        def _check_cp_out3((out,err)):
+        def _check_cp_out3(out_and_err):
+            (out,err) = out_and_err
             self.failUnless(os.path.exists(target_filename))
             got = open(target_filename,"rb").read()
             self.failUnlessEqual(got, datas[3])
@@ -1700,7 +1722,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # and back out again
         dn_copy = os.path.join(self.basedir, "dir1-copy")
         d.addCallback(run, "cp", "--verbose", "-r", "tahoe:dir1", dn_copy)
-        def _check_cp_r_out((out,err)):
+        def _check_cp_r_out(out_and_err):
+            (out,err) = out_and_err
             def _cmp(name):
                 old = open(os.path.join(dn, name), "rb").read()
                 newfn = os.path.join(dn_copy, name)
@@ -1720,8 +1743,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # and again, only writing filecaps
         dn_copy2 = os.path.join(self.basedir, "dir1-copy-capsonly")
         d.addCallback(run, "cp", "-r", "--caps-only", "tahoe:dir1", dn_copy2)
-        def _check_capsonly((out,err)):
+        def _check_capsonly(out_and_err):
             # these should all be LITs
+            (out,err) = out_and_err
             x = open(os.path.join(dn_copy2, "subdir2", "rfile4")).read()
             y = uri.from_string_filenode(x)
             self.failUnlessEqual(y.data, "rfile4")
@@ -1817,7 +1841,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 
         d = self.run_bintahoe(['debug', 'trial', '--reporter=verbose',
                                'allmydata.test.trialtest'])
-        def _check_failure( (out, err, rc) ):
+        def _check_failure(out_err_rc):
+            (out, err, rc) = out_err_rc
             self.failUnlessEqual(rc, 1)
             lines = out.split('\n')
             _check_for_line(lines, "[SKIPPED]", "test_skip")
@@ -1831,7 +1856,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # the --quiet argument regression-tests a problem in finding which arguments to pass to trial
         d.addCallback(lambda ign: self.run_bintahoe(['--quiet', 'debug', 'trial', '--reporter=verbose',
                                                      'allmydata.test.trialtest.Success']))
-        def _check_success( (out, err, rc) ):
+        def _check_success(out_err_rc):
+            (out, err, rc) = out_err_rc
             self.failUnlessEqual(rc, 0)
             lines = out.split('\n')
             _check_for_line(lines, "[SKIPPED]", "test_skip")
index 7d65926c7513bacc58e19c380db8e6c7c3c8c6bf..af377304e03d153e6268f7892e1790db4753a938 100644 (file)
@@ -21,6 +21,8 @@ from allmydata.util.happinessutil import servers_of_happiness, \
 from allmydata.storage_client import StorageFarmBroker
 from allmydata.storage.server import storage_index_to_dir
 from allmydata.client import Client
+from functools import reduce
+import six
 
 MiB = 1024*1024
 
@@ -416,7 +418,8 @@ class ServerErrors(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
         d = self.shouldFail(UploadUnhappinessError, "first_error_all",
                             "server selection failed",
                             upload_data, self.u, DATA)
-        def _check((f,)):
+        def _check(args):
+            (f,) = args
             self.failUnlessIn("placed 0 shares out of 100 total", str(f.value))
             # there should also be a 'last failure was' message
             self.failUnlessIn("ServerError", str(f.value))
@@ -448,7 +451,8 @@ class ServerErrors(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
         d = self.shouldFail(UploadUnhappinessError, "second_error_all",
                             "server selection failed",
                             upload_data, self.u, DATA)
-        def _check((f,)):
+        def _check(args):
+            (f,) = args
             self.failUnlessIn("placed 10 shares out of 100 total", str(f.value))
             # there should also be a 'last failure was' message
             self.failUnlessIn("ServerError", str(f.value))
@@ -709,10 +713,10 @@ def combinations(iterable, r):
     n = len(pool)
     if r > n:
         return
-    indices = range(r)
+    indices = list(range(r))
     yield tuple(pool[i] for i in indices)
     while True:
-        for i in reversed(range(r)):
+        for i in reversed(list(range(r))):
             if indices[i] != i + n - r:
                 break
         else:
@@ -727,7 +731,7 @@ def is_happy_enough(servertoshnums, h, k):
     if len(servertoshnums) < h:
         return False
     # print "servertoshnums: ", servertoshnums, h, k
-    for happysetcombo in combinations(servertoshnums.iterkeys(), h):
+    for happysetcombo in combinations(six.iterkeys(servertoshnums), h):
         # print "happysetcombo: ", happysetcombo
         for subsetcombo in combinations(happysetcombo, k):
             shnums = reduce(set.union, [ servertoshnums[s] for s in subsetcombo ])
@@ -754,7 +758,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         assert self.g, "I tried to find a grid at self.g, but failed"
         servertoshnums = {} # k: server, v: set(shnum)
 
-        for i, c in self.g.servers_by_number.iteritems():
+        for i, c in six.iteritems(self.g.servers_by_number):
             for (dirp, dirns, fns) in os.walk(c.sharedir):
                 for fn in fns:
                     try:
@@ -791,9 +795,10 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d = selector.get_shareholders(broker, sh, storage_index,
                                       share_size, block_size, num_segments,
                                       10, 3, 4)
-        def _have_shareholders((upload_trackers, already_servers)):
+        def _have_shareholders(shareholders):
+            (upload_trackers, already_servers) = shareholders
             assert servers_to_break <= len(upload_trackers)
-            for index in xrange(servers_to_break):
+            for index in range(servers_to_break):
                 tracker = list(upload_trackers)[index]
                 for share in tracker.buckets.keys():
                     tracker.buckets[share].abort()
@@ -1125,7 +1130,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=1, share_number=2))
         # Copy all of the other shares to server number 2
         def _copy_shares(ign):
-            for i in xrange(0, 10):
+            for i in range(0, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         # Remove the first server, and add a placeholder with share 0
@@ -1220,7 +1225,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                                         readonly=True))
         # Copy all of the other shares to server number 2
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         # Remove server 0, and add another in its place
@@ -1263,7 +1268,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=2, share_number=0,
                                         readonly=True))
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 2)
         d.addCallback(_copy_shares)
         d.addCallback(lambda ign:
@@ -1373,7 +1378,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         # return the first argument unchanged.
         self.failUnlessEqual(shares, merge_servers(shares, set([])))
         trackers = []
-        for (i, server) in [(i, "server%d" % i) for i in xrange(5, 9)]:
+        for (i, server) in [(i, "server%d" % i) for i in range(5, 9)]:
             t = FakeServerTracker(server, [i])
             trackers.append(t)
         expected = {
@@ -1398,7 +1403,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         shares3 = {}
         trackers = []
         expected = {}
-        for (i, server) in [(i, "server%d" % i) for i in xrange(10)]:
+        for (i, server) in [(i, "server%d" % i) for i in range(10)]:
             shares3[i] = set([server])
             t = FakeServerTracker(server, [i])
             trackers.append(t)
@@ -1435,7 +1440,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         # FakeServerTracker whose job is to hold those instance variables to
         # test that part.
         trackers = []
-        for (i, server) in [(i, "server%d" % i) for i in xrange(5, 9)]:
+        for (i, server) in [(i, "server%d" % i) for i in range(5, 9)]:
             t = FakeServerTracker(server, [i])
             trackers.append(t)
         # Recall that test1 is a server layout with servers_of_happiness
@@ -1513,7 +1518,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
 
 
     def test_shares_by_server(self):
-        test = dict([(i, set(["server%d" % i])) for i in xrange(1, 5)])
+        test = dict([(i, set(["server%d" % i])) for i in range(1, 5)])
         sbs = shares_by_server(test)
         self.failUnlessEqual(set([1]), sbs["server1"])
         self.failUnlessEqual(set([2]), sbs["server2"])
@@ -1558,7 +1563,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda ign:
             self._add_server(4))
         def _copy_shares(ign):
-            for i in xrange(1, 10):
+            for i in range(1, 10):
                 self._copy_share_to_server(i, 1)
         d.addCallback(_copy_shares)
         d.addCallback(lambda ign:
@@ -1582,7 +1587,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = self.mktemp()
         d = self._setup_and_upload()
         def _setup(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
             c = self.g.clients[0]
@@ -1607,7 +1612,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         d.addCallback(lambda ign:
             self._setup_and_upload())
         def _then(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             self._add_server(server_number=11, readonly=True)
             self._add_server(server_number=12, readonly=True)
@@ -1633,11 +1638,11 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._setup_and_upload())
 
         def _next(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i)
             # Copy all of the shares to server 9, since that will be
             # the first one that the selector sees.
-            for i in xrange(10):
+            for i in range(10):
                 self._copy_share_to_server(i, 9)
             # Remove server 0, and its contents
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
@@ -1658,7 +1663,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         self.basedir = self.mktemp()
         d = self._setup_and_upload()
         def _then(ign):
-            for i in xrange(1, 11):
+            for i in range(1, 11):
                 self._add_server(server_number=i, readonly=True)
             self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
             c = self.g.clients[0]
@@ -2000,7 +2005,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
             self._add_server_with_share(server_number=8, share_number=4)
             self._add_server_with_share(server_number=5, share_number=5)
             self._add_server_with_share(server_number=10, share_number=7)
-            for i in xrange(4):
+            for i in range(4):
                 self._copy_share_to_server(i, 2)
             return self.g.clients[0]
         d.addCallback(_server_setup)
index 03c9b01df56fd8f2bf88bb9640040121f24250e3..a1e60fb445237b1b5bd6f6fd844b304e7391d417 100644 (file)
@@ -1,3 +1,3 @@
-
+from __future__ import print_function; import six; from six.moves import filter  # one line, to keep foo() below at line 2
 def foo(): pass # keep the line number constant
 
@@ -48,7 +51,7 @@ class HumanReadable(unittest.TestCase):
         self.failUnlessEqual(hr(foo), "<foo() at test_util.py:2>")
         self.failUnlessEqual(hr(self.test_repr),
                              "<bound method HumanReadable.test_repr of <allmydata.test.test_util.HumanReadable testMethod=test_repr>>")
-        self.failUnlessEqual(hr(1L), "1")
+        self.failUnlessEqual(hr(1), "1")
         self.failUnlessEqual(hr(10**40),
                              "100000000000000000...000000000000000000")
         self.failUnlessEqual(hr(self), "<allmydata.test.test_util.HumanReadable testMethod=test_repr>")
@@ -56,19 +59,19 @@ class HumanReadable(unittest.TestCase):
         self.failUnlessEqual(hr({1:2}), "{1:2}")
         try:
             raise ValueError
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<ValueError: ()>" # python-2.4
                 or hr(e) == "ValueError()") # python-2.5
         try:
             raise ValueError("oops")
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<ValueError: 'oops'>" # python-2.4
                 or hr(e) == "ValueError('oops',)") # python-2.5
         try:
             raise NoArgumentException
-        except Exception, e:
+        except Exception as e:
             self.failUnless(
                 hr(e) == "<NoArgumentException>" # python-2.4
                 or hr(e) == "NoArgumentException()") # python-2.5
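
# `except ExcType, e:` is a syntax error on Python 3; the `except ExcType
# as e:` form used above is accepted by Python 2.6+ and 3. Sketch:
try:
    raise ValueError("oops")
except ValueError as e:        # was:  except ValueError, e:
    assert "oops" in str(e)
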
@@ -315,18 +318,18 @@ class Asserts(unittest.TestCase):
     def should_assert(self, func, *args, **kwargs):
         try:
             func(*args, **kwargs)
-        except AssertionError, e:
+        except AssertionError as e:
             return str(e)
-        except Exception, e:
+        except Exception as e:
             self.fail("assert failed with non-AssertionError: %s" % e)
         self.fail("assert was not caught")
 
     def should_not_assert(self, func, *args, **kwargs):
         try:
             func(*args, **kwargs)
-        except AssertionError, e:
+        except AssertionError as e:
             self.fail("assertion fired when it should not have: %s" % e)
-        except Exception, e:
+        except Exception as e:
             self.fail("assertion (which shouldn't have failed) failed with non-AssertionError: %s" % e)
         return # we're happy
 
@@ -371,7 +374,7 @@ class Asserts(unittest.TestCase):
         self.failUnlessEqual("postcondition: othermsg: 'message2' <type 'str'>", m)
 
 class FileUtil(unittest.TestCase):
-    def mkdir(self, basedir, path, mode=0777):
+    def mkdir(self, basedir, path, mode=0o777):
         fn = os.path.join(basedir, path)
         fileutil.make_dirs(fn, mode)
 
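
# Bare octal literals such as 0777 are a syntax error on Python 3; the 0o777
# spelling above works on Python 2.6+ and 3. Sketch:
import stat
mode = 0o444                   # was:  0444
assert mode == stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
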
@@ -391,16 +394,16 @@ class FileUtil(unittest.TestCase):
         d = os.path.join(basedir, "doomed")
         self.mkdir(d, "a/b")
         self.touch(d, "a/b/1.txt")
-        self.touch(d, "a/b/2.txt", 0444)
+        self.touch(d, "a/b/2.txt", 0o444)
         self.touch(d, "a/b/3.txt", 0)
         self.mkdir(d, "a/c")
         self.touch(d, "a/c/1.txt")
-        self.touch(d, "a/c/2.txt", 0444)
+        self.touch(d, "a/c/2.txt", 0o444)
         self.touch(d, "a/c/3.txt", 0)
-        os.chmod(os.path.join(d, "a/c"), 0444)
+        os.chmod(os.path.join(d, "a/c"), 0o444)
         self.mkdir(d, "a/d")
         self.touch(d, "a/d/1.txt")
-        self.touch(d, "a/d/2.txt", 0444)
+        self.touch(d, "a/d/2.txt", 0o444)
         self.touch(d, "a/d/3.txt", 0)
         os.chmod(os.path.join(d, "a/d"), 0)
 
@@ -1085,12 +1088,12 @@ class DictUtil(unittest.TestCase):
         d[fake3] = fake7
         d[3] = 7
         d[3] = 8
-        self.failUnless(filter(lambda x: x is 8,  d.itervalues()))
-        self.failUnless(filter(lambda x: x is fake7,  d.itervalues()))
+        self.failUnless(list(filter(lambda x: x is 8,  six.itervalues(d))))
+        self.failUnless(list(filter(lambda x: x is fake7,  six.itervalues(d))))
         # The real 7 should have been ejected by the d[3] = 8.
-        self.failUnless(not filter(lambda x: x is 7,  d.itervalues()))
-        self.failUnless(filter(lambda x: x is fake3,  d.iterkeys()))
-        self.failUnless(filter(lambda x: x is 3,  d.iterkeys()))
+        self.failUnless(not list(filter(lambda x: x is 7,  six.itervalues(d))))
+        self.failUnless(list(filter(lambda x: x is fake3,  six.iterkeys(d))))
+        self.failUnless(list(filter(lambda x: x is 3,  six.iterkeys(d))))
         d[fake3] = 8
 
         d.clear()
@@ -1099,12 +1102,12 @@ class DictUtil(unittest.TestCase):
         fake7 = EqButNotIs(7)
         d[fake3] = fake7
         d[3] = 8
-        self.failUnless(filter(lambda x: x is 8,  d.itervalues()))
-        self.failUnless(filter(lambda x: x is fake7,  d.itervalues()))
+        self.failUnless(list(filter(lambda x: x is 8,  six.itervalues(d))))
+        self.failUnless(list(filter(lambda x: x is fake7,  six.itervalues(d))))
         # The real 7 should have been ejected by the d[3] = 8.
-        self.failUnless(not filter(lambda x: x is 7,  d.itervalues()))
-        self.failUnless(filter(lambda x: x is fake3,  d.iterkeys()))
-        self.failUnless(filter(lambda x: x is 3,  d.iterkeys()))
+        self.failUnless(not list(filter(lambda x: x is 7,  six.itervalues(d))))
+        self.failUnless(list(filter(lambda x: x is fake3,  six.iterkeys(d))))
+        self.failUnless(list(filter(lambda x: x is 3,  six.iterkeys(d))))
         d[fake3] = 8
 
     def test_all(self):
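
# With `from six.moves import filter`, filter() is lazy on both Pythons, and
# a lazy filter object is always truthy even when it would yield nothing --
# hence the list() wrappers in the assertions above. Sketch:
from six.moves import filter
empty = filter(lambda x: x > 10, [1, 2, 3])
assert bool(empty)             # the iterator itself is truthy regardless
assert not list(empty)         # materializing it shows that it is empty
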
@@ -1208,7 +1211,7 @@ class DictUtil(unittest.TestCase):
         self.failUnlessEqual(d.get(3, "default"), "default")
         self.failUnlessEqual(sorted(list(d.items())),
                              [(1, "b"), (2, "a")])
-        self.failUnlessEqual(sorted(list(d.iteritems())),
+        self.failUnlessEqual(sorted(list(six.iteritems(d))),
                              [(1, "b"), (2, "a")])
         self.failUnlessEqual(sorted(d.keys()), [1, 2])
         self.failUnlessEqual(sorted(d.values()), ["a", "b"])
@@ -1285,12 +1288,12 @@ class DictUtil(unittest.TestCase):
         self.failUnlessEqual(d.get("c", 5), 5)
         self.failUnlessEqual(sorted(list(d.items())),
                              [("a", 1), ("b", 2)])
-        self.failUnlessEqual(sorted(list(d.iteritems())),
+        self.failUnlessEqual(sorted(list(six.iteritems(d))),
                              [("a", 1), ("b", 2)])
         self.failUnlessEqual(sorted(d.keys()), ["a", "b"])
         self.failUnlessEqual(sorted(d.values()), [1, 2])
-        self.failUnless(d.has_key("a"))
-        self.failIf(d.has_key("c"))
+        self.failUnless("a" in d)
+        self.failIf("c" in d)
 
         x = d.setdefault("c", 3)
         self.failUnlessEqual(x, 3)
@@ -1686,7 +1689,8 @@ class SimpleSpans:
                 s.add(i, 1)
         return s
 
-    def __contains__(self, (start,length)):
+    def __contains__(self, span):
+        (start,length) = span
         for i in range(start, start+length):
             if i not in self._have:
                 return False
@@ -1703,7 +1707,7 @@ class ByteSpans(unittest.TestCase):
         s1 = Spans(3, 4) # 3,4,5,6
         self._check1(s1)
 
-        s1 = Spans(3L, 4L) # 3,4,5,6
+        s1 = Spans(3, 4) # 3,4,5,6
         self._check1(s1)
 
         s2 = Spans(s1)
@@ -2030,9 +2034,9 @@ class StringSpans(unittest.TestCase):
         self.failUnlessEqual(ds.get(2, 4), "fear")
 
         ds = klass()
-        ds.add(2L, "four")
-        ds.add(3L, "ea")
-        self.failUnlessEqual(ds.get(2L, 4L), "fear")
+        ds.add(2, "four")
+        ds.add(3, "ea")
+        self.failUnlessEqual(ds.get(2, 4), "fear")
 
 
     def do_scan(self, klass):
@@ -2083,13 +2087,13 @@ class StringSpans(unittest.TestCase):
                 p_added = set(range(start, end))
                 b = base()
                 if DEBUG:
-                    print
-                    print dump(b), which
+                    print()
+                    print(dump(b), which)
                     add = klass(); add.add(start, S[start:end])
-                    print dump(add)
+                    print(dump(add))
                 b.add(start, S[start:end])
                 if DEBUG:
-                    print dump(b)
+                    print(dump(b))
                 # check that the new span is there
                 d = b.get(start, end-start)
                 self.failUnlessEqual(d, S[start:end], which)
index 296db0624c2f34f9adc666c716bad66747f68088..da7c14981b3209aae5ba355ef504bd27243ca437 100644 (file)
@@ -25,7 +25,7 @@ class CheckRequirement(unittest.TestCase):
         try:
             check_requirement("foolscap[secure_connections] >= 0.6.0", {"foolscap": ("0.6.1+", "", None)})
             # succeeding is ok
-        except PackagingError, e:
+        except PackagingError as e:
             self.failUnlessIn("could not parse", str(e))
 
         self.failUnlessRaises(PackagingError, check_requirement,
index ba76b8c659922824af5e99d743a6b7556beacec0..8f3996a7a6ecd2ef0dfcb624d92bf5a63af75f14 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os.path, re, urllib, time, cgi
 import simplejson
 from StringIO import StringIO
@@ -39,6 +40,7 @@ from allmydata.test.common_web import HTTPClientGETFactory, \
      HTTPClientHEADFactory
 from allmydata.client import Client, SecretHolder
 from allmydata.introducer import IntroducerNode
+import six
 
 # create a fake uploader/downloader, and a couple of fake dirnodes, then
 # create a webserver that works against them
@@ -448,7 +450,7 @@ class WebMixin(object):
                               u"blockingfile", u"empty", u"n\u00fc.txt", u"quux.txt", u"sub"])
         kids = dict( [(unicode(name),value)
                       for (name,value)
-                      in data[1]["children"].iteritems()] )
+                      in six.iteritems(data[1]["children"])] )
         self.failUnlessEqual(kids[u"sub"][0], "dirnode")
         self.failUnlessIn("metadata", kids[u"sub"][1])
         self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
@@ -517,7 +519,7 @@ class WebMixin(object):
         form.append('')
         form.append('UTF-8')
         form.append(sep)
-        for name, value in fields.iteritems():
+        for name, value in six.iteritems(fields):
             if isinstance(value, tuple):
                 filename, value = value
                 form.append('Content-Disposition: form-data; name="%s"; '
@@ -767,7 +769,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
             # serverids[] keys are strings, since that's what JSON does, but
             # we'd really like them to be ints
             self.failUnlessEqual(data["serverids"]["0"], "phwrsjte")
-            self.failUnless(data["serverids"].has_key("1"),
+            self.failUnless("1" in data["serverids"],
                             str(data["serverids"]))
             self.failUnlessEqual(data["serverids"]["1"], "cmpuvkjm",
                                  str(data["serverids"]))
@@ -829,9 +831,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         headers = {"range": "bytes=1-10"}
         d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(int(status), 206)
-            self.failUnless(headers.has_key("content-range"))
+            self.failUnless("content-range" in headers)
             self.failUnlessReallyEqual(headers["content-range"][0],
                                        "bytes 1-10/%d" % len(self.BAR_CONTENTS))
             self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
@@ -843,9 +846,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         length  = len(self.BAR_CONTENTS)
         d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(int(status), 206)
-            self.failUnless(headers.has_key("content-range"))
+            self.failUnless("content-range" in headers)
             self.failUnlessReallyEqual(headers["content-range"][0],
                                        "bytes 5-%d/%d" % (length-1, length))
             self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
@@ -857,9 +861,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         length  = len(self.BAR_CONTENTS)
         d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(int(status), 206)
-            self.failUnless(headers.has_key("content-range"))
+            self.failUnless("content-range" in headers)
             self.failUnlessReallyEqual(headers["content-range"][0],
                                        "bytes %d-%d/%d" % (length-5, length-1, length))
             self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
@@ -879,10 +884,11 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         headers = {"range": "bytes=1-10"}
         d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(res, "")
             self.failUnlessReallyEqual(int(status), 206)
-            self.failUnless(headers.has_key("content-range"))
+            self.failUnless("content-range" in headers)
             self.failUnlessReallyEqual(headers["content-range"][0],
                                        "bytes 1-10/%d" % len(self.BAR_CONTENTS))
         d.addCallback(_got)
@@ -893,9 +899,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         length  = len(self.BAR_CONTENTS)
         d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(int(status), 206)
-            self.failUnless(headers.has_key("content-range"))
+            self.failUnless("content-range" in headers)
             self.failUnlessReallyEqual(headers["content-range"][0],
                                        "bytes 5-%d/%d" % (length-1, length))
         d.addCallback(_got)
@@ -906,9 +913,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         length  = len(self.BAR_CONTENTS)
         d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(int(status), 206)
-            self.failUnless(headers.has_key("content-range"))
+            self.failUnless("content-range" in headers)
             self.failUnlessReallyEqual(headers["content-range"][0],
                                        "bytes %d-%d/%d" % (length-5, length-1, length))
         d.addCallback(_got)
@@ -927,16 +935,18 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         headers = {"range": "BOGUS=fizbop-quarnak"}
         d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
                      return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(int(status), 200)
-            self.failUnless(not headers.has_key("content-range"))
+            self.failUnless("content-range" not in headers)
             self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
         d.addCallback(_got)
         return d
 
     def test_HEAD_FILEURL(self):
         d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
-        def _got((res, status, headers)):
+        def _got(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(res, "")
             self.failUnlessReallyEqual(headers["content-length"][0],
                                        str(len(self.BAR_CONTENTS)))
@@ -1144,29 +1154,29 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
             uri = "/uri/%s" % self._bar_txt_uri
             d = self.GET(uri, return_response=True)
             # extract the ETag
-            d.addCallback(lambda (data, code, headers):
-                          headers['etag'][0])
+            d.addCallback(lambda data_code_headers:
+                          data_code_headers[2]['etag'][0])
             # do a GET that's supposed to match the ETag
             d.addCallback(lambda etag:
                           self.GET(uri, return_response=True,
                                    headers={"If-None-Match": etag}))
             # make sure it short-circuited (304 instead of 200)
-            d.addCallback(lambda (data, code, headers):
-                          self.failUnlessEqual(int(code), http.NOT_MODIFIED))
+            d.addCallback(lambda data_code_headers:
+                          self.failUnlessEqual(int(data_code_headers[1]), http.NOT_MODIFIED))
             return d
         d.addCallback(_check_match)
 
         def _no_etag(uri, t):
             target = "/uri/%s?t=%s" % (uri, t)
             d = self.GET(target, return_response=True, followRedirect=True)
-            d.addCallback(lambda (data, code, headers):
-                          self.failIf("etag" in headers, target))
+            d.addCallback(lambda data_code_headers:
+                          self.failIf("etag" in data_code_headers[2], target))
             return d
         def _yes_etag(uri, t):
             target = "/uri/%s?t=%s" % (uri, t)
             d = self.GET(target, return_response=True, followRedirect=True)
-            d.addCallback(lambda (data, code, headers):
-                          self.failUnless("etag" in headers, target))
+            d.addCallback(lambda data_code_headers:
+                          self.failUnless("etag" in data_code_headers[2], target))
             return d
 
         d.addCallback(lambda ign: _yes_etag(self._bar_txt_uri, ""))
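
# Lambdas cannot unpack tuple arguments on Python 3 either, so
# `lambda (data, code, headers): ...` becomes a one-argument lambda that
# indexes into the tuple, as above. Sketch with made-up values:
get_etag = lambda data_code_headers: data_code_headers[2]['etag'][0]
assert get_etag(("body", 200, {'etag': ['abc']})) == 'abc'
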
@@ -1188,7 +1198,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_GET_FILEURL_save(self):
         d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
                      return_response=True)
-        def _got((res, statuscode, headers)):
+        def _got(response):
+            (res, statuscode, headers) = response
             content_disposition = headers["content-disposition"][0]
             self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
             self.failUnlessIsBarDotTxt(res)
@@ -1780,7 +1791,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
                         "largest-directory-children": 8,
                         "largest-immutable-file": 19,
                         }
-            for k,v in expected.iteritems():
+            for k,v in six.iteritems(expected):
                 self.failUnlessReallyEqual(stats[k], v,
                                            "stats[%s] was %s, not %s" %
                                            (k, stats[k], v))
@@ -2109,10 +2120,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         return d
 
     def dump_root(self):
-        print "NODEWALK"
+        print("NODEWALK")
         w = webish.DirnodeWalkerMixin()
         def visitor(childpath, childnode, metadata):
-            print childpath
+            print(childpath)
         d = w.walk(self.public_root, visitor)
         return d
 
@@ -2485,7 +2496,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
             self.failUnlessEqual(parsed[0], "dirnode")
             children = dict( [(unicode(name),value)
                               for (name,value)
-                              in parsed[1]["children"].iteritems()] )
+                              in six.iteritems(parsed[1]["children"])] )
             self.failUnlessIn(u"new.txt", children)
             new_json = children[u"new.txt"]
             self.failUnlessEqual(new_json[0], "filenode")
@@ -2528,7 +2539,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         d.addCallback(lambda res:
                       self.HEAD(self.public_url + "/foo/new.txt",
                                 return_response=True))
-        def _got_headers((res, status, headers)):
+        def _got_headers(response):
+            (res, status, headers) = response
             self.failUnlessReallyEqual(res, "")
             self.failUnlessReallyEqual(headers["content-length"][0],
                                        str(len(NEW2_CONTENTS)))
@@ -2563,9 +2575,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         # will be rather terse and unhelpful. addErrback this method to the
         # end of your chain to get more information out of these errors.
         if f.check(error.Error):
-            print "web.error.Error:"
-            print f
-            print f.value.response
+            print("web.error.Error:")
+            print(f)
+            print(f.value.response)
         return f
 
     def test_POST_upload_replace(self):
@@ -5090,8 +5102,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                          for line in res.splitlines()
                          if line]
             except ValueError:
-                print "response is:", res
-                print "undecodeable line was '%s'" % line
+                print("response is:", res)
+                print("undecodeable line was '%s'" % line)
                 raise
             self.failUnlessReallyEqual(len(units), 5+1)
             # should be parent-first
@@ -5149,7 +5161,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 
         d.addCallback(lambda ign:
                       self.delete_shares_numbered(self.uris["subdir"],
-                                                  range(1, 10)))
+                                                  list(range(1, 10))))
 
         # root
         # root/good
@@ -5504,7 +5516,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
         def _stash_bad(ur):
             self.fileurls["1share"] = "uri/" + urllib.quote(ur.get_uri())
-            self.delete_shares_numbered(ur.get_uri(), range(1,10))
+            self.delete_shares_numbered(ur.get_uri(), list(range(1,10)))
 
             u = uri.from_string(ur.get_uri())
             u.key = testutil.flip_bit(u.key, 0)
@@ -5516,14 +5528,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             u = n.get_uri()
             url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
             self.fileurls["dir-1share-json"] = url + "?t=json"
-            self.delete_shares_numbered(u, range(1,10))
+            self.delete_shares_numbered(u, list(range(1,10)))
         d.addCallback(_mangle_dirnode_1share)
         d.addCallback(lambda ign: c0.create_dirnode())
         def _mangle_dirnode_0share(n):
             u = n.get_uri()
             url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
             self.fileurls["dir-0share-json"] = url + "?t=json"
-            self.delete_shares_numbered(u, range(0,10))
+            self.delete_shares_numbered(u, list(range(0,10)))
         d.addCallback(_mangle_dirnode_0share)
 
         # NotEnoughSharesError should be reported sensibly, with a
index 47569da109b166457f6c45ec5c928a22b6dbd39d..4cdbe37283997490569722e7e098c662336eb9fb 100644 (file)
@@ -26,6 +26,7 @@ data. Our 'misc/coverage2text.py' tool produces a slightly more useful
 summary, and 'misc/coverage2html.py' will produce a more useful HTML report.
 
 """
+from __future__ import print_function
 
 from twisted.trial.reporter import TreeReporter, VerboseTextReporter
 
@@ -58,7 +59,7 @@ class CoverageTextReporter(VerboseTextReporter):
     def stop_coverage(self):
         cov.stop()
         cov.save()
-        print "Coverage results written to .coverage"
+        print("Coverage results written to .coverage")
     def printSummary(self):
         # for twisted-2.5.x
         self.stop_coverage()
@@ -73,7 +74,7 @@ class sample_Reporter(object):
     # trigger exceptions. So it is a guide to what methods are invoked on a
     # Reporter.
     def __init__(self, *args, **kwargs):
-        print "START HERE"
+        print("START HERE")
         self.r = TreeReporter(*args, **kwargs)
         self.shouldStop = self.r.shouldStop
         self.separator = self.r.separator
@@ -83,7 +84,7 @@ class sample_Reporter(object):
     def write(self, *args):
         if not self._starting2:
             self._starting2 = True
-            print "FIRST WRITE"
+            print("FIRST WRITE")
         return self.r.write(*args)
 
     def startTest(self, *args, **kwargs):
@@ -102,7 +103,7 @@ class sample_Reporter(object):
         return self.r.writeln(*args, **kwargs)
 
     def printSummary(self, *args, **kwargs):
-        print "PRINT SUMMARY"
+        print("PRINT SUMMARY")
         return self.r.printSummary(*args, **kwargs)
 
     def wasSuccessful(self, *args, **kwargs):
index 372e0b8be2605eb49c755b7a6aa45b588be1901f..36effc175472b1b1073ad83d8c80db7e7917fabf 100644 (file)
@@ -817,7 +817,7 @@ def from_string(u, deep_immutable=False, name=u"<unknown name>"):
         else:
             error = MustBeReadonlyError(kind + " used in a read-only context", name)
 
-    except BadURIError, e:
+    except BadURIError as e:
         error = e
 
     return UnknownURI(u, error=error)
index 4bc170618cc5abe83d156953f76999be7cbcf3d8..7e0ff1bc2550b017e96f7aceff2d4a84bea16a95 100644 (file)
@@ -18,14 +18,14 @@ def abbreviate_time(s):
     if s < 120:
         return _plural(s, "second")
     if s < 3*HOUR:
-        return _plural(s/60, "minute")
+        return _plural(s//60, "minute")
     if s < 2*DAY:
-        return _plural(s/HOUR, "hour")
+        return _plural(s//HOUR, "hour")
     if s < 2*MONTH:
-        return _plural(s/DAY, "day")
+        return _plural(s//DAY, "day")
     if s < 4*YEAR:
-        return _plural(s/MONTH, "month")
-    return _plural(s/YEAR, "year")
+        return _plural(s//MONTH, "month")
+    return _plural(s//YEAR, "year")
 
 def abbreviate_space(s, SI=True):
     if s is None:
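Porting note: Python 3 turns `/` into true division, so every quotient that must stay an integer is rewritten with the floor-division operator `//`, which behaves identically on both versions. For example:

    s = 125
    assert s // 60 == 2     # same on 2.x and 3.x
    # on Py3 (or under "from __future__ import division"), s / 60 == 2.0833...,
    # a float that _plural() was never meant to receive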
index 735f534cdf8f16132a04fb4e1eabd517b5fcd571..3b83889e226d55d8a7af3ac04ded735ff81bf935 100644 (file)
@@ -3,6 +3,7 @@ Tests useful in assertion checking, prints out nicely formatted messages too.
 """
 
 from allmydata.util.humanreadable import hr
+from allmydata.util.sixutil import map
 
 def _assert(___cond=False, *___args, **___kwargs):
     if ___cond:
@@ -18,7 +19,7 @@ def _assert(___cond=False, *___args, **___kwargs):
             msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
     msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
 
-    raise AssertionError, "".join(msgbuf)
+    raise AssertionError("".join(msgbuf))
 
 def precondition(___cond=False, *___args, **___kwargs):
     if ___cond:
@@ -36,7 +37,7 @@ def precondition(___cond=False, *___args, **___kwargs):
             msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
     msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
 
-    raise AssertionError, "".join(msgbuf)
+    raise AssertionError("".join(msgbuf))
 
 def postcondition(___cond=False, *___args, **___kwargs):
     if ___cond:
@@ -54,4 +55,4 @@ def postcondition(___cond=False, *___args, **___kwargs):
             msgbuf.append("%s: %s %s" % ((___kwargs.items()[0][0],) + tuple(map(hr, (___kwargs.items()[0][1], type(___kwargs.items()[0][1]),)))))
     msgbuf.extend([", %s: %s %s" % tuple(map(hr, (k, v, type(v),))) for k, v in ___kwargs.items()[1:]])
 
-    raise AssertionError, "".join(msgbuf)
+    raise AssertionError("".join(msgbuf))
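Porting note: `raise AssertionError, msg` is another removed Python 2 spelling; calling the exception class instead means the same thing and parses under both interpreters. A one-function sketch:

    def _fail(msgbuf):
        # "raise AssertionError, msg" is a SyntaxError on Py3;
        # constructing the exception explicitly is portable
        raise AssertionError("".join(msgbuf))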
index fb8cb63524542011d4755fe454aec0df566c1fb2..bf6244ff47522c1616c71cb0fdbe15f729aa0f40 100644 (file)
@@ -2,12 +2,13 @@
 import string
 
 from allmydata.util.assertutil import precondition
+from allmydata.util.sixutil import map
 
 z_base_32_alphabet = "ybndrfg8ejkmcpqxot1uwisza345h769" # Zooko's choice, rationale in "DESIGN" doc
 rfc3548_alphabet = "abcdefghijklmnopqrstuvwxyz234567" # RFC3548 standard used by Gnutella, Content-Addressable Web, THEX, Bitzi, Web-Calculus...
 chars = rfc3548_alphabet
 
-vals = ''.join(map(chr, range(32)))
+vals = ''.join(map(chr, list(range(32))))
 c2vtranstable = string.maketrans(chars, vals)
 v2ctranstable = string.maketrans(vals, chars)
 identitytranstable = string.maketrans('', '')
@@ -21,7 +22,7 @@ def _get_trailing_chars_without_lsbs(N, d):
         s.extend(_get_trailing_chars_without_lsbs(N+1, d=d))
     i = 0
     while i < len(chars):
-        if not d.has_key(i):
+        if i not in d:
             d[i] = None
             s.append(chars[i])
         i = i + 2**N
@@ -83,12 +84,12 @@ def b2a_l(os, lengthinbits):
     @return the contents of os in base-32 encoded form
     """
     precondition(isinstance(lengthinbits, (int, long,)), "lengthinbits is required to be an integer.", lengthinbits=lengthinbits)
-    precondition((lengthinbits+7)/8 == len(os), "lengthinbits is required to specify a number of bits storable in exactly len(os) octets.", lengthinbits=lengthinbits, lenos=len(os))
+    precondition((lengthinbits+7)//8 == len(os), "lengthinbits is required to specify a number of bits storable in exactly len(os) octets.", lengthinbits=lengthinbits, lenos=len(os))
 
     os = map(ord, os)
 
-    numquintets = (lengthinbits+4)/5
-    numoctetsofdata = (lengthinbits+7)/8
+    numquintets = (lengthinbits+4)//5
+    numoctetsofdata = (lengthinbits+7)//8
     # print "numoctetsofdata: %s, len(os): %s, lengthinbits: %s, numquintets: %s" % (numoctetsofdata, len(os), lengthinbits, numquintets,)
     # strip trailing octets that won't be used
     del os[numoctetsofdata:]
@@ -97,7 +98,7 @@ def b2a_l(os, lengthinbits):
         os[-1] = os[-1] >> (8-(lengthinbits % 8))
         os[-1] = os[-1] << (8-(lengthinbits % 8))
     # append zero octets for padding if needed
-    numoctetsneeded = (numquintets*5+7)/8 + 1
+    numoctetsneeded = (numquintets*5+7)//8 + 1
     os.extend([0]*(numoctetsneeded-len(os)))
 
     quintets = []
@@ -113,12 +114,12 @@ def b2a_l(os, lengthinbits):
             cutoff = 256
             continue
         cutoff = cutoff * 8
-        quintet = num / cutoff
+        quintet = num // cutoff
         quintets.append(quintet)
         num = num - (quintet * cutoff)
 
-        cutoff = cutoff / 32
-        quintet = num / cutoff
+        cutoff = cutoff // 32
+        quintet = num // cutoff
         quintets.append(quintet)
         num = num - (quintet * cutoff)
 
@@ -188,13 +189,13 @@ def could_be_base32_encoded_l(s, lengthinbits, s5=s5, tr=string.translate, ident
     precondition(isinstance(s, str), s)
     if s == '':
         return True
-    assert lengthinbits%5 < len(s5), lengthinbits
-    assert ord(s[-1]) < s5[lengthinbits%5]
-    return (((lengthinbits+4)/5) == len(s)) and s5[lengthinbits%5][ord(s[-1])] and not string.translate(s, identitytranstable, chars)
+    assert int(lengthinbits%5) < len(s5), lengthinbits
+    #FIXME assert ord(s[-1]) < s5[lengthinbits%5]
+    return (((lengthinbits+4)//5) == len(s)) and s5[lengthinbits%5][ord(s[-1])] and not string.translate(s, identitytranstable, chars)
 
 def num_octets_that_encode_to_this_many_quintets(numqs):
     # Here is a computation that conveniently expresses this:
-    return (numqs*5+3)/8
+    return (numqs*5+3)//8
 
 def a2b(cs):
     """
@@ -232,8 +233,8 @@ def a2b_l(cs, lengthinbits):
 
     qs = map(ord, string.translate(cs, c2vtranstable))
 
-    numoctets = (lengthinbits+7)/8
-    numquintetsofdata = (lengthinbits+4)/5
+    numoctets = (lengthinbits+7)//8
+    numquintetsofdata = (lengthinbits+4)//5
     # strip trailing quintets that won't be used
     del qs[numquintetsofdata:]
     # zero out any unused bits in the final quintet
@@ -241,7 +242,7 @@ def a2b_l(cs, lengthinbits):
         qs[-1] = qs[-1] >> (5-(lengthinbits % 5))
         qs[-1] = qs[-1] << (5-(lengthinbits % 5))
     # append zero quintets for padding if needed
-    numquintetsneeded = (numoctets*8+4)/5
+    numquintetsneeded = (numoctets*8+4)//5
     qs.extend([0]*(numquintetsneeded-len(qs)))
 
     octets = []
@@ -250,10 +251,10 @@ def a2b_l(cs, lengthinbits):
     i = 1
     while len(octets) < numoctets:
         while pos > 256:
-            pos = pos / 32
+            pos = pos // 32
             num = num + (qs[i] * pos)
             i = i + 1
-        octet = num / 256
+        octet = num // 256
         octets.append(octet)
         num = num - (octet * 256)
         num = num * 256
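Porting note: the arithmetic here moves to `//`, but the string.maketrans/string.translate machinery above (and the `(int, long)` check) is left untouched by this straw-man and remains Python-2-only; a full port would presumably treat base32 input as bytes. A Py3-only sketch of where that would lead, not something this commit does:

    chars = b"abcdefghijklmnopqrstuvwxyz234567"
    vals = bytes(range(32))
    c2vtranstable = bytes.maketrans(chars, vals)   # bytes.maketrans replaces string.maketrans
    assert b"y".translate(c2vtranstable) == bytes([24])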
index 7c03159c6ef1b3bd576333ff9bbf02ac630f481b..61c910816097283642451d71ce5eb2c98fae96d1 100644 (file)
@@ -6,13 +6,17 @@ import copy, operator
 from bisect import bisect_left, insort_left
 
 from allmydata.util.assertutil import _assert, precondition
+from functools import reduce
+import six
+from allmydata.util.sixutil import map
+from six.moves import zip
 
 def move(k, d1, d2, strict=False):
     """
     Move item with key k from d1 to d2.
     """
-    if strict and not d1.has_key(k):
-        raise KeyError, k
+    if strict and k not in d1:
+        raise KeyError(k)
 
     d2[k] = d1[k]
     del d1[k]
@@ -25,11 +29,11 @@ def subtract(d1, d2):
     """
     if len(d1) > len(d2):
         for k in d2.keys():
-            if d1.has_key(k):
+            if k in d1:
                 del d1[k]
     else:
         for k in d1.keys():
-            if d2.has_key(k):
+            if k in d2:
                 del d1[k]
     return d1
 
@@ -41,7 +45,7 @@ class DictOfSets(dict):
             self[key] = set([value])
 
     def update(self, otherdictofsets):
-        for key, values in otherdictofsets.iteritems():
+        for key, values in six.iteritems(otherdictofsets):
             if key in self:
                 self[key].update(values)
             else:
@@ -60,14 +64,14 @@ class UtilDict:
         self.update(initialdata)
 
     def del_if_present(self, key):
-        if self.has_key(key):
+        if key in self:
             del self[key]
 
     def items_sorted_by_value(self):
         """
         @return a sequence of (key, value,) pairs sorted according to value
         """
-        l = [(x[1], x[0],) for x in self.d.iteritems()]
+        l = [(x[1], x[0],) for x in six.iteritems(self.d)]
         l.sort()
         return [(x[1], x[0],) for x in l]
 
@@ -94,7 +98,7 @@ class UtilDict:
     def __cmp__(self, other):
         try:
             return self.d.__cmp__(other)
-        except TypeError, le:
+        except TypeError as le:
             # maybe we should look for a .d member in other.  I know this is insanely kludgey, but the Right Way To Do It is for dict.__cmp__ to use structural typing ("duck typing")
             try:
                 return self.d.__cmp__(other.d)
@@ -216,16 +220,16 @@ class NumDict:
         """
         @return a sequence of (key, value,) pairs sorted according to value
         """
-        l = [(x[1], x[0],) for x in self.d.iteritems()]
+        l = [(x[1], x[0],) for x in six.iteritems(self.d)]
         l.sort()
         return [(x[1], x[0],) for x in l]
 
     def item_with_largest_value(self):
-        it = self.d.iteritems()
-        (winner, winnerval,) = it.next()
+        it = six.iteritems(self.d)
+        (winner, winnerval,) = six.advance_iterator(it)
         try:
             while True:
-                n, nv = it.next()
+                n, nv = six.advance_iterator(it)
                 if nv > winnerval:
                     winner = n
                     winnerval = nv
@@ -256,7 +260,7 @@ class NumDict:
     def __cmp__(self, other):
         try:
             return self.d.__cmp__(other)
-        except TypeError, le:
+        except TypeError as le:
             # maybe we should look for a .d member in other.  I know this is insanely kludgey, but the Right Way To Do It is for dict.__cmp__ to use structural typing ("duck typing")
             try:
                 return self.d.__cmp__(other.d)
@@ -339,7 +343,7 @@ class NumDict:
         return self.d.values(*args, **kwargs)
 
 def del_if_present(d, k):
-    if d.has_key(k):
+    if k in d:
         del d[k]
 
 class ValueOrderedDict:
@@ -364,7 +368,7 @@ class ValueOrderedDict:
             return self
         def next(self):
             precondition(self.i <= len(self.c.l), "The iterated ValueOrderedDict doesn't have this many elements.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, self.c)
-            precondition((self.i == len(self.c.l)) or self.c.d.has_key(self.c.l[self.i][1]), "The iterated ValueOrderedDict doesn't have this key.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, (self.i < len(self.c.l)) and self.c.l[self.i], self.c)
+            precondition((self.i == len(self.c.l)) or self.c.l[self.i][1] in self.c.d, "The iterated ValueOrderedDict doesn't have this key.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, (self.i < len(self.c.l)) and self.c.l[self.i], self.c)
             if self.i == len(self.c.l):
                 raise StopIteration
             le = self.c.l[self.i]
@@ -391,7 +395,7 @@ class ValueOrderedDict:
             return self
         def next(self):
             precondition(self.i <= len(self.c.l), "The iterated ValueOrderedDict doesn't have this many elements.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, self.c)
-            precondition((self.i == len(self.c.l)) or self.c.d.has_key(self.c.l[self.i][1]), "The iterated ValueOrderedDict doesn't have this key.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, (self.i < len(self.c.l)) and self.c.l[self.i], self.c)
+            precondition((self.i == len(self.c.l)) or self.c.l[self.i][1] in self.c.d, "The iterated ValueOrderedDict doesn't have this key.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, (self.i < len(self.c.l)) and self.c.l[self.i], self.c)
             if self.i == len(self.c.l):
                 raise StopIteration
             le = self.c.l[self.i]
@@ -409,7 +413,7 @@ class ValueOrderedDict:
             return self
         def next(self):
             precondition(self.i <= len(self.c.l), "The iterated ValueOrderedDict doesn't have this many elements.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, self.c)
-            precondition((self.i == len(self.c.l)) or self.c.d.has_key(self.c.l[self.i][1]), "The iterated ValueOrderedDict doesn't have this key.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, (self.i < len(self.c.l)) and self.c.l[self.i], self.c)
+            precondition((self.i == len(self.c.l)) or self.c.l[self.i][1] in self.c.d, "The iterated ValueOrderedDict doesn't have this key.  Most likely this is because someone altered the contents of the ValueOrderedDict while the iteration was in progress.", self.i, (self.i < len(self.c.l)) and self.c.l[self.i], self.c)
             if self.i == len(self.c.l):
                 raise StopIteration
             le = self.c.l[self.i]
@@ -431,12 +435,12 @@ class ValueOrderedDict:
     def __repr_n__(self, n=None):
         s = ["{",]
         try:
-            iter = self.iteritems()
-            x = iter.next()
+            iter = six.iteritems(self)
+            x = six.advance_iterator(iter)
             s.append(str(x[0])); s.append(": "); s.append(str(x[1]))
             i = 1
             while (n is None) or (i < n):
-                x = iter.next()
+                x = six.advance_iterator(iter)
                 s.append(", "); s.append(str(x[0])); s.append(": "); s.append(str(x[1]))
         except StopIteration:
             pass
@@ -450,8 +454,8 @@ class ValueOrderedDict:
         return "<%s %s>" % (self.__class__.__name__, self.__repr_n__(16),)
 
     def __eq__(self, other):
-        for (k, v,) in other.iteritems():
-            if not self.d.has_key(k) or self.d[k] != v:
+        for (k, v,) in six.iteritems(other):
+            if k not in self.d or self.d[k] != v:
                 return False
         return True
 
@@ -461,17 +465,17 @@ class ValueOrderedDict:
     def _assert_invariants(self):
         iter = self.l.__iter__()
         try:
-            oldx = iter.next()
+            oldx = six.advance_iterator(iter)
             while True:
-                x = iter.next()
+                x = six.advance_iterator(iter)
                 # self.l is required to be sorted
                 _assert(x >= oldx, x, oldx)
                 # every element of self.l is required to appear in self.d
-                _assert(self.d.has_key(x[1]), x)
+                _assert(x[1] in self.d, x)
                 oldx =x
         except StopIteration:
             pass
-        for (k, v,) in self.d.iteritems():
+        for (k, v,) in six.iteritems(self.d):
             i = bisect_left(self.l, (v, k,))
             while (self.l[i][0] is not v) or (self.l[i][1] is not k):
                 i += 1
@@ -488,14 +492,14 @@ class ValueOrderedDict:
 
     def setdefault(self, key, default=None):
         assert self._assert_invariants()
-        if not self.has_key(key):
+        if key not in self:
             self[key] = default
         assert self._assert_invariants()
         return self[key]
 
     def __setitem__(self, key, val=None):
         assert self._assert_invariants()
-        if self.d.has_key(key):
+        if key in self.d:
             oldval = self.d[key]
             if oldval != val:
                 # re-sort
@@ -524,9 +528,9 @@ class ValueOrderedDict:
         return result
 
     def __getitem__(self, key, default=None, strictkey=True):
-        if not self.d.has_key(key):
+        if key not in self.d:
             if strictkey:
-                raise KeyError, key
+                raise KeyError(key)
             else:
                 return default
         return self.d[key]
@@ -543,7 +547,7 @@ class ValueOrderedDict:
             that key and strictkey is False
         """
         assert self._assert_invariants()
-        if self.d.has_key(key):
+        if key in self.d:
             val = self.d.pop(key)
             i = bisect_left(self.l, (val, key,))
             while (self.l[i][0] is not val) or (self.l[i][1] is not key):
@@ -553,7 +557,7 @@ class ValueOrderedDict:
             return val
         elif strictkey:
             assert self._assert_invariants()
-            raise KeyError, key
+            raise KeyError(key)
         else:
             assert self._assert_invariants()
             return default
@@ -569,26 +573,26 @@ class ValueOrderedDict:
         @return: self
         """
         assert self._assert_invariants()
-        for (k, v,) in otherdict.iteritems():
+        for (k, v,) in six.iteritems(otherdict):
             self.insert(k, v)
         assert self._assert_invariants()
         return self
 
     def has_key(self, key):
         assert self._assert_invariants()
-        return self.d.has_key(key)
+        return key in self.d
 
     def popitem(self):
         if not self.l:
-            raise KeyError, 'popitem(): dictionary is empty'
+            raise KeyError('popitem(): dictionary is empty')
         le = self.l.pop(0)
         del self.d[le[1]]
         return (le[1], le[0],)
 
     def pop(self, k, default=None, strictkey=False):
-        if not self.d.has_key(k):
+        if k not in self.d:
             if strictkey:
-                raise KeyError, k
+                raise KeyError(k)
             else:
                 return default
         v = self.d.pop(k)
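Porting note: `dict.iteritems()` and `iterator.next()` are gone in Python 3; the commit routes both through the six compatibility layer, which picks the right spelling at runtime. Usage sketch, assuming six is importable:

    import six

    d = {"a": 1, "b": 2}
    for k, v in six.iteritems(d):       # d.iteritems() on Py2, iter(d.items()) on Py3
        pass
    it = iter(d)
    first = six.advance_iterator(it)    # it.next() on Py2, next(it) on Py3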
index 3ceb1a919c7e72dda2d2d820c0b50dd16f8c65a6..d5280f54094f67b4463dac45abc863dc1c538211 100644 (file)
@@ -11,6 +11,7 @@ from twisted.python import usage
 import locale
 from allmydata.util import log
 from allmydata.util.fileutil import abspath_expanduser_unicode
+from allmydata.util.sixutil import map
 
 
 def canonical_encoding(encoding):
index 793c530959c4a9e510dce47ec00a0134d2fc6fbf..e709ec514c663c3c4acd0806d2e94782aec1aab6 100644 (file)
@@ -5,21 +5,21 @@
 
 # from /usr/src/linux/include/linux/inotify.h
 
-IN_ACCESS = 0x00000001L         # File was accessed
-IN_MODIFY = 0x00000002L         # File was modified
-IN_ATTRIB = 0x00000004L         # Metadata changed
-IN_CLOSE_WRITE = 0x00000008L    # Writeable file was closed
-IN_CLOSE_NOWRITE = 0x00000010L  # Unwriteable file closed
-IN_OPEN = 0x00000020L           # File was opened
-IN_MOVED_FROM = 0x00000040L     # File was moved from X
-IN_MOVED_TO = 0x00000080L       # File was moved to Y
-IN_CREATE = 0x00000100L         # Subfile was created
-IN_DELETE = 0x00000200L         # Subfile was delete
-IN_DELETE_SELF = 0x00000400L    # Self was deleted
-IN_MOVE_SELF = 0x00000800L      # Self was moved
-IN_UNMOUNT = 0x00002000L        # Backing fs was unmounted
-IN_Q_OVERFLOW = 0x00004000L     # Event queued overflowed
-IN_IGNORED = 0x00008000L        # File was ignored
+IN_ACCESS = 0x00000001         # File was accessed
+IN_MODIFY = 0x00000002         # File was modified
+IN_ATTRIB = 0x00000004         # Metadata changed
+IN_CLOSE_WRITE = 0x00000008    # Writeable file was closed
+IN_CLOSE_NOWRITE = 0x00000010  # Unwriteable file closed
+IN_OPEN = 0x00000020           # File was opened
+IN_MOVED_FROM = 0x00000040     # File was moved from X
+IN_MOVED_TO = 0x00000080       # File was moved to Y
+IN_CREATE = 0x00000100         # Subfile was created
+IN_DELETE = 0x00000200         # Subfile was deleted
+IN_DELETE_SELF = 0x00000400    # Self was deleted
+IN_MOVE_SELF = 0x00000800      # Self was moved
+IN_UNMOUNT = 0x00002000        # Backing fs was unmounted
+IN_Q_OVERFLOW = 0x00004000     # Event queued overflowed
+IN_IGNORED = 0x00008000        # File was ignored
 
 IN_ONLYDIR = 0x01000000         # only watch the path if it is a directory
 IN_DONT_FOLLOW = 0x02000000     # don't follow a sym link
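Porting note: the trailing `L` long-literal suffix is a SyntaxError on Python 3, and these flag constants fit in a plain int on Python 2 anyway (ints auto-promote on overflow), so dropping the suffix is loss-free:

    IN_Q_OVERFLOW = 0x00004000   # 0x00004000L no longer parses on Py3
    assert IN_Q_OVERFLOW == 16384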
index 8eb8e9fb6cd360a74fdc4c62147bc0fd0b98df09..6ce622fabc92b92b0a2f6106ea4036cf8dbe3def 100644 (file)
@@ -26,7 +26,7 @@ def rename(src, dst, tries=4, basedelay=0.1):
     for i in range(tries-1):
         try:
             return os.rename(src, dst)
-        except EnvironmentError, le:
+        except EnvironmentError as le:
             # XXX Tighten this to check if this is a permission-denied error (possibly due to another Windows process having the file open) and execute the superkludge only in this case.
             log.msg("XXX KLUDGE Attempting to move file %s => %s; got %s; sleeping %s seconds" % (src, dst, le, basedelay,))
             time.sleep(basedelay)
@@ -54,7 +54,7 @@ def remove(f, tries=4, basedelay=0.1):
     for i in range(tries-1):
         try:
             return os.remove(f)
-        except EnvironmentError, le:
+        except EnvironmentError as le:
             # XXX Tighten this to check if this is a permission-denied error (possibly due to another Windows process having the file open) and execute the superkludge only in this case.
             if not os.path.exists(f):
                 return
@@ -164,7 +164,7 @@ class EncryptedTemporaryFile:
         self.file.truncate(newsize)
 
 
-def make_dirs(dirname, mode=0777):
+def make_dirs(dirname, mode=0o777):
     """
     An idempotent version of os.makedirs().  If the dir already exists, do
     nothing and return without raising an exception.  If this call creates the
@@ -175,13 +175,13 @@ def make_dirs(dirname, mode=0777):
     tx = None
     try:
         os.makedirs(dirname, mode)
-    except OSError, x:
+    except OSError as x:
         tx = x
 
     if not os.path.isdir(dirname):
         if tx:
             raise tx
-        raise exceptions.IOError, "unknown error prevented creation of directory, or deleted the directory immediately after creation: %s" % dirname # careful not to construct an IOError with a 2-tuple, as that has a special meaning...
+        raise exceptions.IOError("unknown error prevented creation of directory, or deleted the directory immediately after creation: %s" % dirname) # careful not to construct an IOError with a 2-tuple, as that has a special meaning...
 
 def rm_dir(dirname):
     """
@@ -202,7 +202,7 @@ def rm_dir(dirname):
             else:
                 remove(fullname)
         os.rmdir(dirname)
-    except Exception, le:
+    except Exception as le:
         # Ignore "No such file or directory"
         if (not isinstance(le, OSError)) or le.args[0] != 2:
             excs.append(le)
@@ -214,8 +214,8 @@ def rm_dir(dirname):
         if len(excs) == 1:
             raise excs[0]
         if len(excs) == 0:
-            raise OSError, "Failed to remove dir for unknown reason."
-        raise OSError, excs
+            raise OSError("Failed to remove dir for unknown reason.")
+        raise OSError(excs)
 
 
 def remove_if_possible(f):
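Porting note: `0777` is the old octal notation, removed in Python 3; `0o777` denotes the same value and is accepted from Python 2.6 on, which makes it the portable spelling:

    mode = 0o777        # the 2.x-only literal 0777 is a SyntaxError on 3
    assert mode == 511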
index b8e8b542107c4fdf9403ec440d3eee4899edcbeb..2163728b29e06c5c018e168b2203b99e2eebb7b4 100644 (file)
@@ -4,6 +4,8 @@ reporting it in messages
 """
 
 from copy import deepcopy
+import six
+from allmydata.util.sixutil import map
 
 def failure_message(peer_count, k, happy, effective_happy):
     # If peer_count < needed_shares, this error message makes more
@@ -48,7 +50,7 @@ def shares_by_server(servermap):
     dictionary of sets of shares, indexed by peerids.
     """
     ret = {}
-    for shareid, peers in servermap.iteritems():
+    for shareid, peers in six.iteritems(servermap):
         assert isinstance(peers, set)
         for peerid in peers:
             ret.setdefault(peerid, set()).add(shareid)
@@ -137,7 +139,7 @@ def servers_of_happiness(sharemap):
     # The implementation here is an adaptation of an algorithm described in
     # "Introduction to Algorithms", Cormen et al, 2nd ed., pp 658-662.
     dim = len(graph)
-    flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
+    flow_function = [[0 for sh in range(dim)] for s in range(dim)]
     residual_graph, residual_function = residual_network(graph, flow_function)
     while augmenting_path_for(residual_graph):
         path = augmenting_path_for(residual_graph)
@@ -147,7 +149,7 @@ def servers_of_happiness(sharemap):
         # is the amount of unused capacity on that edge. Taking the
         # minimum of a list of those values for each edge in the
         # augmenting path gives us our delta.
-        delta = min(map(lambda (u, v), rf=residual_function: rf[u][v],
+        delta = min(map(lambda u_and_v, rf=residual_function: rf[u_and_v[0]][u_and_v[1]],
                         path))
         for (u, v) in path:
             flow_function[u][v] += delta
@@ -161,7 +163,7 @@ def servers_of_happiness(sharemap):
     # our graph, so we can stop after summing flow across those. The
     # value of a flow computed in this way is the size of a maximum
     # matching on the bipartite graph described above.
-    return sum([flow_function[0][v] for v in xrange(1, num_servers+1)])
+    return sum([flow_function[0][v] for v in range(1, num_servers+1)])
 
 def flow_network_for(sharemap):
     """
@@ -197,7 +199,7 @@ def flow_network_for(sharemap):
         graph.append(sharemap[k])
     # For each share, add an entry that has an edge to the sink.
     sink_num = num_servers + num_shares + 1
-    for i in xrange(num_shares):
+    for i in range(num_shares):
         graph.append([sink_num])
     # Add an empty entry for the sink, which has no outbound edges.
     graph.append([])
@@ -221,7 +223,7 @@ def reindex(sharemap, base_index):
     # Number the shares
     for k in ret:
         for shnum in ret[k]:
-            if not shares.has_key(shnum):
+            if shnum not in shares:
                 shares[shnum] = num
                 num += 1
         ret[k] = map(lambda x: shares[x], ret[k])
@@ -233,9 +235,9 @@ def residual_network(graph, f):
     flow network represented by my graph and f arguments. graph is a
     flow network in adjacency-list form, and f is a flow in graph.
     """
-    new_graph = [[] for i in xrange(len(graph))]
-    cf = [[0 for s in xrange(len(graph))] for sh in xrange(len(graph))]
-    for i in xrange(len(graph)):
+    new_graph = [[] for i in range(len(graph))]
+    cf = [[0 for s in range(len(graph))] for sh in range(len(graph))]
+    for i in range(len(graph)):
         for v in graph[i]:
             if f[i][v] == 1:
                 # We add an edge (v, i) with cf[v,i] = 1. This means
@@ -284,9 +286,9 @@ def bfs(graph, s):
     GRAY  = 1
     # BLACK vertices are those we have seen and explored
     BLACK = 2
-    color        = [WHITE for i in xrange(len(graph))]
-    predecessor  = [None for i in xrange(len(graph))]
-    distance     = [-1 for i in xrange(len(graph))]
+    color        = [WHITE for i in range(len(graph))]
+    predecessor  = [None for i in range(len(graph))]
+    distance     = [-1 for i in range(len(graph))]
     queue = [s] # vertices that we haven't explored yet.
     color[s] = GRAY
     distance[s] = 0
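Porting note: Python 3 removed tuple parameters (PEP 3113), so `lambda (u, v): ...` no longer parses; the rewritten lambda takes one argument and indexes into it. A toy sketch of the transformation, with an illustrative two-node residual function:

    rf = [[0, 5], [7, 0]]
    path = [(0, 1), (1, 0)]
    # Py2 only:  min(map(lambda (u, v): rf[u][v], path))
    delta = min(map(lambda u_and_v: rf[u_and_v[0]][u_and_v[1]], path))
    assert delta == 5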
index 603dcacf76803d519eb6619f6c7470f7060fe644..bb62ffb399176e82751d2aac1df45dde0c79186a 100644 (file)
@@ -15,18 +15,18 @@ class BetterRepr(Repr):
 
     def repr_function(self, obj, level):
         if hasattr(obj, 'func_code'):
-            return '<' + obj.func_name + '() at ' + os.path.basename(obj.func_code.co_filename) + ':' + str(obj.func_code.co_firstlineno) + '>'
+            return '<' + obj.__name__ + '() at ' + os.path.basename(obj.__code__.co_filename) + ':' + str(obj.__code__.co_firstlineno) + '>'
         else:
-            return '<' + obj.func_name + '() at (builtin)'
+            return '<' + obj.__name__ + '() at (builtin)'
 
     def repr_instance_method(self, obj, level):
         if hasattr(obj, 'func_code'):
-            return '<' + obj.im_class.__name__ + '.' + obj.im_func.__name__ + '() at ' + os.path.basename(obj.im_func.func_code.co_filename) + ':' + str(obj.im_func.func_code.co_firstlineno) + '>'
+            return '<' + obj.__self__.__class__.__name__ + '.' + obj.__func__.__name__ + '() at ' + os.path.basename(obj.__func__.__code__.co_filename) + ':' + str(obj.__func__.__code__.co_firstlineno) + '>'
         else:
-            return '<' + obj.im_class.__name__ + '.' + obj.im_func.__name__ + '() at (builtin)'
+            return '<' + obj.__self__.__class__.__name__ + '.' + obj.__func__.__name__ + '() at (builtin)'
 
     def repr_long(self, obj, level):
-        s = `obj` # XXX Hope this isn't too slow...
+        s = repr(obj) # XXX Hope this isn't too slow...
         if len(s) > self.maxlong:
             i = max(0, (self.maxlong-3)/2)
             j = max(0, self.maxlong-3-i)
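Porting note: backtick repr syntax and the `func_code`/`im_func`/`im_class` attribute names disappear in Python 3; `repr(obj)` and the dunder aliases `__code__`/`__func__`/`__self__` already exist on Python 2.6+, so the new lines run on both. (The neighbouring `(self.maxlong-3)/2` would still produce a float under Py3 and would need the same `//` treatment as elsewhere in this patch.) Sketch:

    def f():
        pass

    assert repr(f).startswith("<function")   # `f` in backticks is a SyntaxError on 3
    assert f.__code__.co_firstlineno > 0     # __code__ aliases func_code on 2.6+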
index 2f7db051b791d2355afad9cbdbbd9f0779b34780..820b2034cdeaee23484bd5c1689b8b11377f536e 100644 (file)
@@ -192,12 +192,12 @@ def _synchronously_find_addresses_via_config():
 def _query(path, args, regex):
     env = {'LANG': 'en_US.UTF-8'}
     TRIES = 5
-    for trial in xrange(TRIES):
+    for trial in range(TRIES):
         try:
             p = subprocess.Popen([path] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
             (output, err) = p.communicate()
             break
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EINTR and trial < TRIES-1:
                 continue
             raise
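Porting note: `xrange` does not exist on Python 3. Plain `range` is lazy there, and for a five-iteration retry loop the Python 2 cost of materializing a tiny list is negligible, so a bare rename suffices:

    TRIES = 5
    for trial in range(TRIES):   # xrange(TRIES) on Py2; range works on both
        break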
index bbdb40b08492a76269ea07f7ebe924c9e1cc63f6..53fc5e61565b61ffb9fd69aa1291150a90a44daa 100644 (file)
@@ -8,7 +8,7 @@ def div_ceil(n, d):
     """
     The smallest integer k such that k*d >= n.
     """
-    return (n/d) + (n%d != 0)
+    return (n//d) + (n%d != 0)
 
 def next_multiple(n, k):
     """
@@ -39,7 +39,7 @@ def next_power_of_k(n, k):
         return k**x
 
 def ave(l):
-    return sum(l) / len(l)
+    return sum(l) // len(l)
 
 def log_ceil(n, b):
     """
index 2f3794999a45706ff36dd89947faffe71de53c87..971c543a4edf25713576fbd6b371075188b0c42b 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import time
 from twisted.internet import task
@@ -45,6 +46,6 @@ class PollMixin:
                 if not e.check(*self._poll_should_ignore_these_errors):
                     errs.append(e)
             if errs:
-                print errs
+                print(errs)
                 self.fail("Errors snooped, terminating early")
 
diff --git a/src/allmydata/util/sixutil.py b/src/allmydata/util/sixutil.py
new file mode 100644 (file)
index 0000000..a86a4c2
--- /dev/null
@@ -0,0 +1,8 @@
+
+def map(f, xs, ys=None):
+    if ys is None:
+        return [f(x) for x in xs]
+    else:
+        if len(xs) != len(ys):
+            raise AssertionError("iterators must be the same length")
+        return [f(x, y) for (x, y) in zip(xs, ys)]
\ No newline at end of file
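Porting note: the new sixutil.map helper pins down the Python 2 semantics this codebase relies on: it always returns a real list (built-in map returns a lazy iterator on Python 3), and with two sequences it insists they be the same length instead of Py2 map's None-padding. Usage sketch:

    from allmydata.util.sixutil import map

    octets = map(ord, "abc")                            # always [97, 98, 99], never an iterator
    octets[0] = 0                                       # safe to mutate, unlike a Py3 map object
    sums = map(lambda x, y: x + y, [1, 2], [10, 20])    # [11, 22]; unequal lengths raise AssertionError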
index 34c65e1e00f1dbf0f18ee92ca5b2b23c08f9e43c..9cf843dca3bb5577c3c68e91b4da1342a77b101f 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 class Spans:
     """I represent a compressed list of booleans, one per index (an integer).
@@ -40,7 +41,7 @@ class Spans:
                     assert start > prev_end
                 prev_end = start+length
         except AssertionError:
-            print "BAD:", self.dump()
+            print("BAD:", self.dump())
             raise
 
     def add(self, start, length):
@@ -191,7 +192,8 @@ class Spans:
         not_other = bounds - other
         return self - not_other
 
-    def __contains__(self, (start,length)):
+    def __contains__(self, xxx_todo_changeme):
+        (start,length) = xxx_todo_changeme
         for span_start,span_length in self._spans:
             o = overlap(start, length, span_start, span_length)
             if o:
@@ -265,7 +267,7 @@ class DataSpans:
         for start, data in self.spans[1:]:
             if not start > prev_end:
                 # adjacent or overlapping: bad
-                print "ASSERTION FAILED", self.spans
+                print("ASSERTION FAILED", self.spans)
                 raise AssertionError
 
     def get(self, start, length):
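Porting note: the same PEP 3113 removal bites method signatures: `def __contains__(self, (start, length))` is invalid Python 3. `xxx_todo_changeme` is the mechanical placeholder 2to3 leaves behind; a human pass would normally pick a real name. A cleaner toy equivalent:

    class Spans(object):
        def __init__(self, spans):
            self._spans = spans              # list of (start, length) pairs
        def __contains__(self, span):
            (start, length) = span           # explicit unpack replaces the 2.x tuple parameter
            return (start, length) in self._spans

    assert (0, 10) in Spans([(0, 10)])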
index b1e56ef748e4d12ae949f5d253afdaa2b7b6e651..1122fb903938c8ac103a1731443ac708fc0baa22 100644 (file)
@@ -6,9 +6,11 @@
 # Transitive Grace Period Public License, version 1 or later.
 
 from __future__ import division
+from __future__ import print_function
 from allmydata.util.mathutil import round_sigfigs
 import math
 import sys
+from functools import reduce
 
 def pr_file_loss(p_list, k):
     """
@@ -97,7 +99,7 @@ def print_pmf(pmf, n=4, out=sys.stdout):
     significant digits.
     """
     for k, p in enumerate(pmf):
-        print >>out, "i=" + str(k) + ":", round_sigfigs(p, n)
+        print("i=" + str(k) + ":", round_sigfigs(p, n), file=out)
 
 def pr_backup_file_loss(p_list, backup_p, k):
     """
index 0f8f2f387dd89735e25ae208e8e6064851fd6878..e7573aab22c0b7605a689d7268e448ea80e2f81c 100644 (file)
@@ -27,7 +27,7 @@ def iso_utc_time_to_seconds(isotime, _conversion_re=re.compile(r"(?P<year>\d{4})
     """
     m = _conversion_re.match(isotime)
     if not m:
-        raise ValueError(isotime, "not a complete ISO8601 timestamp")
+        raise ValueError(isotime, "not a complete ISO8601 timestamp")
     year, month, day = int(m.group('year')), int(m.group('month')), int(m.group('day'))
     hour, minute, second = int(m.group('hour')), int(m.group('minute')), int(m.group('second'))
     subsecstr = m.group('subsecond')
index 6d8d8e2545f5ffb68fa8f71b90609910a923fb03..32fb312598a441912ed9998ae0015b17320f9a51 100644 (file)
@@ -196,6 +196,9 @@ class NormalizedVersion(object):
             self._cannot_compare(other)
         return self.parts == other.parts
 
+    def __hash__(self):
+        return hash(self.parts)
+
     def __lt__(self, other):
         if not isinstance(other, NormalizedVersion):
             self._cannot_compare(other)
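Porting note: Python 3 sets `__hash__` to None on any class that defines `__eq__` without also defining `__hash__`, which would silently make NormalizedVersion unhashable; hashing the same `parts` tuple that `__eq__` compares preserves the eq/hash contract. A toy sketch of the rule:

    class V(object):
        def __init__(self, parts):
            self.parts = parts
        def __eq__(self, other):
            return self.parts == other.parts
        def __hash__(self):                  # required on Py3 once __eq__ exists
            return hash(self.parts)

    assert len({V((1, 0)), V((1, 0))}) == 1  # equal versions collapse in a set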
index be6df366df1c7da9a8f27efe80e8cb6ff4a00a5f..b5045f947720b73f73a1f52ea882caa0c9b52edc 100644 (file)
@@ -14,6 +14,7 @@ from allmydata.interfaces import ExistingChildError, NoSuchChildError, \
 from allmydata.mutable.common import UnrecoverableFileError
 from allmydata.util import abbreviate
 from allmydata.util.encodingutil import to_str, quote_output
+import six
 
 
 TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
@@ -116,7 +117,7 @@ def convert_children_json(nodemaker, children_json):
     children = {}
     if children_json:
         data = simplejson.loads(children_json)
-        for (namex, (ctype, propdict)) in data.iteritems():
+        for (namex, (ctype, propdict)) in six.iteritems(data):
             namex = unicode(namex)
             writecap = to_str(propdict.get("rw_uri"))
             readcap = to_str(propdict.get("ro_uri"))
index 21f6d429c23f7df5593709b4bcd301283b423d42..97aaa9689a914cb6f6ad0736ff00ee9ae9bfbb9c 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 import simplejson
 import urllib
@@ -36,6 +37,7 @@ from allmydata.web.info import MoreInfo
 from allmydata.web.operations import ReloadMixin
 from allmydata.web.check_results import json_check_results, \
      json_check_and_repair_results
+import six
 
 class BlockingFileError(Exception):
     # TODO: catch and transform
@@ -74,7 +76,7 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
 
     def got_child(self, node_or_failure, ctx, name):
         DEBUG = False
-        if DEBUG: print "GOT_CHILD", name, node_or_failure
+        if DEBUG: print("GOT_CHILD", name, node_or_failure)
         req = IRequest(ctx)
         method = req.method
         nonterminal = len(req.postpath) > 1
@@ -83,24 +85,24 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
             f = node_or_failure
             f.trap(NoSuchChildError)
             # No child by this name. What should we do about it?
-            if DEBUG: print "no child", name
-            if DEBUG: print "postpath", req.postpath
+            if DEBUG: print("no child", name)
+            if DEBUG: print("postpath", req.postpath)
             if nonterminal:
-                if DEBUG: print " intermediate"
+                if DEBUG: print(" intermediate")
                 if should_create_intermediate_directories(req):
                     # create intermediate directories
-                    if DEBUG: print " making intermediate directory"
+                    if DEBUG: print(" making intermediate directory")
                     d = self.node.create_subdirectory(name)
                     d.addCallback(make_handler_for,
                                   self.client, self.node, name)
                     return d
             else:
-                if DEBUG: print " terminal"
+                if DEBUG: print(" terminal")
                 # terminal node
                 if (method,t) in [ ("POST","mkdir"), ("PUT","mkdir"),
                                    ("POST", "mkdir-with-children"),
                                    ("POST", "mkdir-immutable") ]:
-                    if DEBUG: print " making final directory"
+                    if DEBUG: print(" making final directory")
                     # final directory
                     kids = {}
                     if t in ("mkdir-with-children", "mkdir-immutable"):
@@ -121,14 +123,14 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
                                   self.client, self.node, name)
                     return d
                 if (method,t) in ( ("PUT",""), ("PUT","uri"), ):
-                    if DEBUG: print " PUT, making leaf placeholder"
+                    if DEBUG: print(" PUT, making leaf placeholder")
                     # we were trying to find the leaf filenode (to put a new
                     # file in its place), and it didn't exist. That's ok,
                     # since that's the leaf node that we're about to create.
                     # We make a dummy one, which will respond to the PUT
                     # request by replacing itself.
                     return PlaceHolderNodeHandler(self.client, self.node, name)
-            if DEBUG: print " 404"
+            if DEBUG: print(" 404")
             # otherwise, we just return a no-such-child error
             return f
 
@@ -137,11 +139,11 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
             if not IDirectoryNode.providedBy(node):
                 # we would have put a new directory here, but there was a
                 # file in the way.
-                if DEBUG: print "blocking"
+                if DEBUG: print("blocking")
                 raise WebError("Unable to create directory '%s': "
                                "a file was in the way" % name,
                                http.CONFLICT)
-        if DEBUG: print "good child"
+        if DEBUG: print("good child")
         return make_handler_for(node, self.client, self.node, name)
 
     def render_DELETE(self, ctx):
@@ -550,12 +552,12 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
         body = req.content.read()
         try:
             children = simplejson.loads(body)
-        except ValueError, le:
+        except ValueError as le:
             le.args = tuple(le.args + (body,))
             # TODO test handling of bad JSON
             raise
         cs = {}
-        for name, (file_or_dir, mddict) in children.iteritems():
+        for name, (file_or_dir, mddict) in six.iteritems(children):
             name = unicode(name) # simplejson-2.0.1 returns str *or* unicode
             writecap = mddict.get('rw_uri')
             if writecap is not None:
@@ -870,7 +872,7 @@ def DirectoryJSONMetadata(ctx, dirnode):
     d = dirnode.list()
     def _got(children):
         kids = {}
-        for name, (childnode, metadata) in children.iteritems():
+        for name, (childnode, metadata) in six.iteritems(children):
             assert IFilesystemNode.providedBy(childnode), childnode
             rw_uri = childnode.get_write_uri()
             ro_uri = childnode.get_readonly_uri()
@@ -1032,7 +1034,8 @@ class ManifestResults(rend.Page, ReloadMixin):
     def data_items(self, ctx, data):
         return self.monitor.get_status()["manifest"]
 
-    def render_row(self, ctx, (path, cap)):
+    def render_row(self, ctx, xxx_todo_changeme):
+        (path, cap) = xxx_todo_changeme
         ctx.fillSlots("path", self.slashify_path(path))
         root = get_root(ctx)
         # TODO: we need a clean consistent way to get the type of a cap string
index cc8b2ce15ebf32e107dd835fdb80f97a83addbf5..373fd1ebb076f9bac24a155c2998ef1684923ffb 100644 (file)
@@ -8,6 +8,7 @@ import simplejson
 from allmydata import get_package_versions_string
 from allmydata.util import idlib
 from allmydata.web.common import getxmlfile, get_arg, TIME_FORMAT
+import six
 
 
 class IntroducerRoot(rend.Page):
@@ -62,7 +63,7 @@ class IntroducerRoot(rend.Page):
         res["announcement_summary"] = announcement_summary
         distinct_hosts = dict([(name, len(hosts))
                                for (name, hosts)
-                               in service_hosts.iteritems()])
+                               in six.iteritems(service_hosts)])
         res["announcement_distinct_hosts"] = distinct_hosts
 
         return simplejson.dumps(res, indent=1) + "\n"
index 8ee9f202768681212d68363426bca8084043c0d6..da6a976faeeec2635f804ae6fa0a3af3b0693a06 100644 (file)
@@ -16,7 +16,7 @@ MINUTE = 60
 HOUR = 60*MINUTE
 DAY = 24*HOUR
 
-(MONITOR, RENDERER, WHEN_ADDED) = range(3)
+(MONITOR, RENDERER, WHEN_ADDED) = list(range(3))
 
 class OphandleTable(rend.Page, service.Service):
     implements(IOpHandleTable)
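Porting note: this particular `list(range(3))` wrapper is redundant (though harmless): tuple unpacking accepts any iterable on both versions, so the bare range would already work under Python 3:

    MONITOR, RENDERER, WHEN_ADDED = range(3)   # unpacks fine on 2.x and 3.x
    assert WHEN_ADDED == 2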
index 89e6b6404736267126149d15e9eb032affb9c9ff..3409d046dcdb01c6dc8b44f7703b4fa4396f3874 100644 (file)
@@ -364,7 +364,7 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
         rows = []
         for ev in events:
             ev = ev.copy()
-            if ev.has_key('server'):
+            if 'server' in ev:
                 ev["serverid"] = ev["server"].get_longname()
                 del ev["server"]
             # find an empty slot in the rows
index eaf5d5eb9115b95b99935d48c3e0a131fb169ec8..1e11e7c2a8889291e558b873f6a6013f9a9ae0cc 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 done = False
 
@@ -28,7 +29,7 @@ def initialize():
     # So be paranoid about catching errors and reporting them to original_stderr,
     # so that we can at least see them.
     def _complain(message):
-        print >>original_stderr, isinstance(message, str) and message or repr(message)
+        print(isinstance(message, str) and message or repr(message), file=original_stderr)
         log.msg(message, level=log.WEIRD)
 
     # Work around <http://bugs.python.org/issue6058>.
@@ -120,7 +121,7 @@ def initialize():
                     if self._hConsole is None:
                         try:
                             self._stream.flush()
-                        except Exception, e:
+                        except Exception as e:
                             _complain("%s.flush: %r from %r" % (self.name, e, self._stream))
                             raise
 
@@ -144,7 +145,7 @@ def initialize():
                                 remaining -= n.value
                                 if remaining == 0: break
                                 text = text[n.value:]
-                    except Exception, e:
+                    except Exception as e:
                         _complain("%s.write: %r" % (self.name, e))
                         raise
 
@@ -152,7 +153,7 @@ def initialize():
                     try:
                         for line in lines:
                             self.write(line)
-                    except Exception, e:
+                    except Exception as e:
                         _complain("%s.writelines: %r" % (self.name, e))
                         raise
 
@@ -165,7 +166,7 @@ def initialize():
                 sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>')
             else:
                 sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>')
-    except Exception, e:
+    except Exception as e:
         _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))
 
     # This works around <http://bugs.python.org/issue2128>.
@@ -184,10 +185,10 @@ def initialize():
         return re.sub(ur'\x7F[0-9a-fA-F]*\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s)
 
     try:
-        argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
-    except Exception, e:
+        argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in range(0, argc.value)]
+    except Exception as e:
         _complain("%s:  could not unmangle Unicode arguments.\n%r"
-                  % (sys.argv[0], [argv_unicode[i] for i in xrange(0, argc.value)]))
+                  % (sys.argv[0], [argv_unicode[i] for i in range(0, argc.value)]))
         raise
 
     # Take only the suffix with the same number of arguments as sys.argv.
index 2b87689b89865cbeba186ceb7bdd6738b616c6c0..09bf21be8c6556392a29d8ce1d05fdc5decee312 100644 (file)
@@ -5,7 +5,7 @@ _AMD_KEY = r"Software\Allmydata"
 _BDIR_KEY = 'Base Dir Path'
 
 if sys.platform not in ('win32'):
-    raise ImportError, "registry cannot be used on non-windows systems"
+    raise ImportError("registry cannot be used on non-windows systems")
     class WindowsError(Exception): # stupid voodoo to appease pyflakes
         pass
 
@@ -32,7 +32,7 @@ def get_registry_setting(key, name, _topkey=None):
             regkey = _winreg.OpenKey(topkey, key)
 
             sublen, vallen, timestamp = _winreg.QueryInfoKey(regkey)
-            for validx in xrange(vallen):
+            for validx in range(vallen):
                 keyname, value, keytype = _winreg.EnumValue(regkey, validx)
                 if keyname == name and keytype == _winreg.REG_SZ:
                     return value
@@ -40,7 +40,7 @@ def get_registry_setting(key, name, _topkey=None):
         except WindowsError:
             continue
     # We didn't find the key:
-    raise KeyError(key, name, "registry setting not found")
+    raise KeyError(key, name, "registry setting not found")
 
 def set_registry_setting(key, name, data, reg_type=_winreg.REG_SZ,
                          _topkey=_winreg.HKEY_LOCAL_MACHINE, create_key_if_missing=True):
@@ -55,7 +55,7 @@ def set_registry_setting(key, name, data, reg_type=_winreg.REG_SZ,
         if create_key_if_missing:
             regkey = _winreg.CreateKey(_topkey, key)
         else:
-            raise KeyError(key, "registry key not found")
+            raise KeyError(key, "registry key not found")
 
     try:
         _winreg.DeleteValue(regkey, name)