git.rkrishnan.org Git - tahoe-lafs/tahoe-lafs.git / commitdiff
Change uses of os.path.expanduser and os.path.abspath. refs #2235
author    Daira Hopwood <daira@jacaranda.org>
          Fri, 30 Jan 2015 00:50:18 +0000 (00:50 +0000)
committer Daira Hopwood <daira@jacaranda.org>
          Fri, 30 Jan 2015 00:50:18 +0000 (00:50 +0000)
Signed-off-by: Daira Hopwood <daira@jacaranda.org>
15 files changed:
src/allmydata/client.py
src/allmydata/frontends/auth.py
src/allmydata/frontends/drop_upload.py
src/allmydata/introducer/server.py
src/allmydata/manhole.py
src/allmydata/node.py
src/allmydata/scripts/cli.py
src/allmydata/scripts/tahoe_get.py
src/allmydata/scripts/tahoe_put.py
src/allmydata/stats.py
src/allmydata/test/test_auth.py
src/allmydata/test/test_cli.py
src/allmydata/test/test_encodingutil.py
src/allmydata/test/test_mutable.py
src/allmydata/util/fileutil.py

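For orientation before the per-file hunks: every change below replaces a raw os.path.expanduser / os.path.abspath call with allmydata.util.fileutil.abspath_expanduser_unicode (or asserts that the caller already did so). A minimal sketch of the behaviour these call sites appear to rely on; the base= handling shown here is inferred from the hunks, and the real helper also contains Windows-specific logic omitted here:

import os

def abspath_expanduser_unicode(path_u, base=None):
    # Sketch, not the real implementation: expand a leading '~', resolve a
    # relative path against 'base' (or the current directory), and return a
    # normalized absolute unicode path.
    assert isinstance(path_u, unicode), path_u
    path_u = os.path.expanduser(path_u)
    if not os.path.isabs(path_u):
        if base is None:
            base = os.getcwdu()
        path_u = os.path.join(base, path_u)
    return os.path.normpath(path_u)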
diff --git a/src/allmydata/client.py b/src/allmydata/client.py
index 64b09c076c2b06d8f8963b383eef2c0941367cad..42c2ff384fe2ef6dff487dcc749c09a18d3c5f41 100644 (file)
@@ -16,6 +16,7 @@ from allmydata.control import ControlServer
 from allmydata.introducer.client import IntroducerClient
 from allmydata.util import hashutil, base32, pollmixin, log, keyutil, idlib
 from allmydata.util.encodingutil import get_filesystem_encoding
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.abbreviate import parse_abbreviated_size
 from allmydata.util.time_format import parse_duration, parse_date
 from allmydata.stats import StatsProvider
@@ -450,8 +451,8 @@ class Client(node.Node, pollmixin.PollMixin):
 
         from allmydata.webish import WebishServer
         nodeurl_path = os.path.join(self.basedir, "node.url")
-        staticdir = self.get_config("node", "web.static", "public_html")
-        staticdir = os.path.expanduser(staticdir)
+        staticdir_config = self.get_config("node", "web.static", "public_html").decode("utf-8")
+        staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)
         ws = WebishServer(self, webport, nodeurl_path, staticdir)
         self.add_service(ws)
 
diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py
index fa7fd379405bada9ad376623674f905c8b39792c..712a888a0ad84cbc57163a613fe94b78360aa65e 100644 (file)
@@ -1,4 +1,5 @@
 import os
+
 from zope.interface import implements
 from twisted.web.client import getPage
 from twisted.internet import defer
@@ -7,6 +8,8 @@ from twisted.conch import error as conch_error
 from twisted.conch.ssh import keys
 
 from allmydata.util import base32
+from allmydata.util.fileutil import abspath_expanduser_unicode
+
 
 class NeedRootcapLookupScheme(Exception):
     """Accountname+Password-based access schemes require some kind of
@@ -28,7 +31,7 @@ class AccountFileChecker:
         self.passwords = {}
         self.pubkeys = {}
         self.rootcaps = {}
-        for line in open(os.path.expanduser(accountfile), "r"):
+        for line in open(abspath_expanduser_unicode(accountfile), "r"):
             line = line.strip()
             if line.startswith("#") or not line:
                 continue
diff --git a/src/allmydata/frontends/drop_upload.py b/src/allmydata/frontends/drop_upload.py
index 6f25625e0030274f9156190c196bc483c46328b9..0e2b48b4981720e02d7a3935ac0c5876140327fc 100644 (file)
@@ -1,5 +1,5 @@
 
-import os, sys
+import sys
 
 from twisted.internet import defer
 from twisted.python.filepath import FilePath
@@ -9,6 +9,7 @@ from foolscap.api import eventually
 from allmydata.interfaces import IDirectoryNode
 
 from allmydata.util.encodingutil import quote_output, get_filesystem_encoding
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.immutable.upload import FileName
 
 
@@ -19,7 +20,7 @@ class DropUploader(service.MultiService):
         service.MultiService.__init__(self)
 
         try:
-            local_dir_u = os.path.expanduser(local_dir_utf8.decode('utf-8'))
+            local_dir_u = abspath_expanduser_unicode(local_dir_utf8.decode('utf-8'))
             if sys.platform == "win32":
                 local_dir = local_dir_u
             else:
diff --git a/src/allmydata/introducer/server.py b/src/allmydata/introducer/server.py
index 43a02611446245363efba3fc0063fff62de8ea18..7031c3af68b792e45651476342b736f14c782757 100644 (file)
@@ -6,7 +6,7 @@ from foolscap.api import Referenceable
 import allmydata
 from allmydata import node
 from allmydata.util import log, rrefutil
-from allmydata.util.encodingutil import get_filesystem_encoding
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.introducer.interfaces import \
      RIIntroducerPublisherAndSubscriberService_v2
 from allmydata.introducer.common import convert_announcement_v1_to_v2, \
@@ -21,7 +21,7 @@ class IntroducerNode(node.Node):
     NODETYPE = "introducer"
     GENERATED_FILES = ['introducer.furl']
 
-    def __init__(self, basedir="."):
+    def __init__(self, basedir=u"."):
         node.Node.__init__(self, basedir)
         self.read_config()
         self.init_introducer()
@@ -33,8 +33,8 @@ class IntroducerNode(node.Node):
         introducerservice = IntroducerService(self.basedir)
         self.add_service(introducerservice)
 
-        old_public_fn = os.path.join(self.basedir, "introducer.furl").encode(get_filesystem_encoding())
-        private_fn = os.path.join(self.basedir, "private", "introducer.furl").encode(get_filesystem_encoding())
+        old_public_fn = os.path.join(self.basedir, u"introducer.furl")
+        private_fn = os.path.join(self.basedir, u"private", u"introducer.furl")
 
         if os.path.exists(old_public_fn):
             if os.path.exists(private_fn):
@@ -62,9 +62,9 @@ class IntroducerNode(node.Node):
         self.log("init_web(webport=%s)", args=(webport,), umid="2bUygA")
 
         from allmydata.webish import IntroducerWebishServer
-        nodeurl_path = os.path.join(self.basedir, "node.url")
-        staticdir = self.get_config("node", "web.static", "public_html")
-        staticdir = os.path.expanduser(staticdir)
+        nodeurl_path = os.path.join(self.basedir, u"node.url")
+        config_staticdir = self.get_config("node", "web.static", "public_html").decode('utf-8')
+        staticdir = abspath_expanduser_unicode(config_staticdir, base=self.basedir)
         ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir)
         self.add_service(ws)
 
diff --git a/src/allmydata/manhole.py b/src/allmydata/manhole.py
index 1d6121dc0334636b589c6bb78dbfa10874518e27..63b62a545a1b3148cbb1cd53661d418a630708ce 100644 (file)
@@ -1,8 +1,8 @@
 
 # this is adapted from my code in Buildbot  -warner
 
-import os.path
 import binascii, base64
+
 from twisted.python import log
 from twisted.application import service, strports
 from twisted.cred import checkers, portal
@@ -12,6 +12,8 @@ from twisted.internet import protocol
 
 from zope.interface import implements
 
+from allmydata.util.fileutil import precondition_abspath
+
 # makeTelnetProtocol and _TelnetRealm are for the TelnetManhole
 
 class makeTelnetProtocol:
@@ -63,7 +65,8 @@ class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase):
     """
 
     def __init__(self, authorized_keys_file):
-        self.authorized_keys_file = os.path.expanduser(authorized_keys_file)
+        precondition_abspath(authorized_keys_file)
+        self.authorized_keys_file = authorized_keys_file
 
     def checkKey(self, credentials):
         f = open(self.authorized_keys_file)
@@ -244,14 +247,12 @@ class AuthorizedKeysManhole(_BaseManhole):
         'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
         simple tcp port.
 
-        @param keyfile: the name of a file (relative to the buildmaster's
-                        basedir) that contains SSH public keys of authorized
-                        users, one per line. This is the exact same format
-                        as used by sshd in ~/.ssh/authorized_keys .
+        @param keyfile: the path of a file that contains SSH public keys of
+                        authorized users, one per line. This is the exact
+                        same format as used by sshd in ~/.ssh/authorized_keys .
+                        The path should be absolute.
         """
 
-        # TODO: expanduser this, and make it relative to the buildmaster's
-        # basedir
         self.keyfile = keyfile
         c = AuthorizedKeysChecker(keyfile)
         _BaseManhole.__init__(self, port, c)
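The manhole change above moves '~' expansion out of AuthorizedKeysChecker and into the caller (Node.setup_ssh), keeping only an assertion. A hedged sketch of what precondition_abspath is assumed to check (the real check lives in allmydata.util.fileutil; the error messages are illustrative):

import os

def precondition_abspath(path):
    # Assumed check: the path must already be an absolute unicode path,
    # e.g. one produced by abspath_expanduser_unicode(..., base=basedir).
    if not isinstance(path, unicode):
        raise AssertionError("not a Unicode path: %r" % (path,))
    if not os.path.isabs(path):
        raise AssertionError("not an absolute path: %r" % (path,))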
diff --git a/src/allmydata/node.py b/src/allmydata/node.py
index d29ad22ce4c202edf3ca8772afbd5097eaec753e..710ce7d5045923dbaae3c9c94a8ac6e835e751d0 100644 (file)
@@ -93,12 +93,11 @@ class Node(service.MultiService):
         iputil.increase_rlimits()
 
     def init_tempdir(self):
-        local_tempdir_utf8 = "tmp" # default is NODEDIR/tmp/
-        tempdir = self.get_config("node", "tempdir", local_tempdir_utf8).decode('utf-8')
-        tempdir = os.path.join(self.basedir, tempdir)
+        tempdir_config = self.get_config("node", "tempdir", "tmp").decode('utf-8')
+        tempdir = abspath_expanduser_unicode(tempdir_config, base=self.basedir)
         if not os.path.exists(tempdir):
             fileutil.make_dirs(tempdir)
-        tempfile.tempdir = abspath_expanduser_unicode(tempdir)
+        tempfile.tempdir = tempdir
         # this should cause twisted.web.http (which uses
         # tempfile.TemporaryFile) to put large request bodies in the given
         # directory. Without this, the default temp dir is usually /tmp/,
@@ -220,11 +219,12 @@ class Node(service.MultiService):
     def setup_ssh(self):
         ssh_port = self.get_config("node", "ssh.port", "")
         if ssh_port:
-            ssh_keyfile = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
+            ssh_keyfile_config = self.get_config("node", "ssh.authorized_keys_file").decode('utf-8')
+            ssh_keyfile = abspath_expanduser_unicode(ssh_keyfile_config, base=self.basedir)
             from allmydata import manhole
-            m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile.encode(get_filesystem_encoding()))
+            m = manhole.AuthorizedKeysManhole(ssh_port, ssh_keyfile)
             m.setServiceParent(self)
-            self.log("AuthorizedKeysManhole listening on %s" % ssh_port)
+            self.log("AuthorizedKeysManhole listening on %s" % (ssh_port,))
 
     def get_app_versions(self):
         # TODO: merge this with allmydata.get_package_versions
diff --git a/src/allmydata/scripts/cli.py b/src/allmydata/scripts/cli.py
index e0e505df4b4cd660785c2bbaf462a4e37d332cb8..e240c9ecd1682af1a8e1b1823884f5fe4f864578 100644 (file)
@@ -140,15 +140,11 @@ class GetOptions(FilesystemOptions):
         # tahoe get FOO bar              # write to local file
         # tahoe get tahoe:FOO bar        # same
 
-        self.from_file = argv_to_unicode(arg1)
-
-        if arg2:
-            self.to_file = argv_to_unicode(arg2)
-        else:
-            self.to_file = None
+        if arg2 == "-":
+            arg2 = None
 
-        if self.to_file == "-":
-            self.to_file = None
+        self.from_file = argv_to_unicode(arg1)
+        self.to_file   = None if arg2 is None else argv_to_abspath(arg2)
 
     def getSynopsis(self):
         return "Usage:  %s [global-opts] get [options] REMOTE_FILE LOCAL_FILE" % (self.command_name,)
@@ -180,17 +176,11 @@ class PutOptions(FilesystemOptions):
     def parseArgs(self, arg1=None, arg2=None):
         # see Examples below
 
-        if arg1 is not None and arg2 is not None:
-            self.from_file = argv_to_unicode(arg1)
-            self.to_file =  argv_to_unicode(arg2)
-        elif arg1 is not None and arg2 is None:
-            self.from_file = argv_to_unicode(arg1) # might be "-"
-            self.to_file = None
-        else:
-            self.from_file = None
-            self.to_file = None
-        if self.from_file == u"-":
-            self.from_file = None
+        if arg1 == "-":
+            arg1 = None
+
+        self.from_file = None if arg1 is None else argv_to_abspath(arg1)
+        self.to_file   = None if arg2 is None else argv_to_unicode(arg2)
 
         if self['format']:
             if self['format'].upper() not in ("SDMF", "MDMF", "CHK"):
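The rewritten GetOptions/PutOptions parsers hand local file arguments to argv_to_abspath instead of argv_to_unicode. A sketch of the assumed composition (argv_to_unicode and abspath_expanduser_unicode are the real helpers imported elsewhere in this commit; the body below is an assumption, not the actual source of argv_to_abspath):

from allmydata.util.encodingutil import argv_to_unicode
from allmydata.util.fileutil import abspath_expanduser_unicode

def argv_to_abspath(s):
    # Assumed behaviour: decode the raw argv bytes using the I/O encoding,
    # then expand '~' and make the result absolute, so tahoe_get/tahoe_put
    # can open() the path without further processing.
    return abspath_expanduser_unicode(argv_to_unicode(s))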
diff --git a/src/allmydata/scripts/tahoe_get.py b/src/allmydata/scripts/tahoe_get.py
index 280d8c052abdd697d06e131c57551e79bbfe9dd6..73ef67b45cc2c5326520b7a6f08fd954c0de2207 100644 (file)
@@ -1,5 +1,5 @@
 
-import os, urllib
+import urllib
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
 from allmydata.scripts.common_http import do_http, format_http_error
@@ -26,7 +26,7 @@ def get(options):
     resp = do_http("GET", url)
     if resp.status in (200, 201,):
         if to_file:
-            outf = open(os.path.expanduser(to_file), "wb")
+            outf = open(to_file, "wb")
         else:
             outf = stdout
         while True:
diff --git a/src/allmydata/scripts/tahoe_put.py b/src/allmydata/scripts/tahoe_put.py
index a85539efec87e0d96f4a839705a4261212848ac8..91b71c81405331146d89bacfd1c41f127767dc20 100644 (file)
@@ -1,7 +1,7 @@
 
-import os
 from cStringIO import StringIO
 import urllib
+
 from allmydata.scripts.common_http import do_http, format_http_success, format_http_error
 from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                      UnknownAliasError
@@ -73,7 +73,7 @@ def put(options):
         url += "?" + "&".join(queryargs)
 
     if from_file:
-        infileobj = open(os.path.expanduser(from_file), "rb")
+        infileobj = open(from_file, "rb")
     else:
         # do_http() can't use stdin directly: for one thing, we need a
         # Content-Length field. So we currently must copy it.
diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py
index 7db323ba5ce68b2fe30101cb0dc47c1fc0188add..22f01812402e816d2e6f1f35e8b364c97838f70d 100644 (file)
@@ -11,8 +11,8 @@ from twisted.application.internet import TimerService
 from zope.interface import implements
 from foolscap.api import eventually, DeadReferenceError, Referenceable, Tub
 
-from allmydata.util import log
-from allmydata.util.encodingutil import quote_output
+from allmydata.util import log, fileutil
+from allmydata.util.encodingutil import quote_local_unicode_path
 from allmydata.interfaces import RIStatsProvider, RIStatsGatherer, IStatsProducer
 
 class LoadMonitor(service.MultiService):
@@ -246,7 +246,7 @@ class StdOutStatsGatherer(StatsGatherer):
 class PickleStatsGatherer(StdOutStatsGatherer):
     # inherit from StdOutStatsGatherer for connect/disconnect notifications
 
-    def __init__(self, basedir=".", verbose=True):
+    def __init__(self, basedir=u".", verbose=True):
         self.verbose = verbose
         StatsGatherer.__init__(self, basedir)
         self.picklefile = os.path.join(basedir, "stats.pickle")
@@ -258,7 +258,7 @@ class PickleStatsGatherer(StdOutStatsGatherer):
             except Exception:
                 print ("Error while attempting to load pickle file %s.\n"
                        "You may need to restore this file from a backup, or delete it if no backup is available.\n" %
-                       quote_output(os.path.abspath(self.picklefile)))
+                       quote_local_unicode_path(self.picklefile))
                 raise
             f.close()
         else:
@@ -311,7 +311,7 @@ class StatsGathererService(service.MultiService):
     def save_portnum(self, junk):
         portnum = self.listener.getPortnum()
         portnumfile = os.path.join(self.basedir, 'portnum')
-        open(portnumfile, 'wb').write('%d\n' % (portnum,))
+        fileutil.write(portnumfile, '%d\n' % (portnum,))
 
     def tub_ready(self, ignored):
         ff = os.path.join(self.basedir, self.furl_file)
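save_portnum now goes through fileutil.write rather than an unclosed open(portnumfile, 'wb').write(...). A sketch of the assumed helper (the actual function in allmydata.util.fileutil may take extra arguments):

def write(path, data):
    # Assumed behaviour: write 'data' to 'path' and always close the handle,
    # which the replaced one-liner did not guarantee on error.
    f = open(path, "wb")
    try:
        f.write(data)
    finally:
        f.close()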
diff --git a/src/allmydata/test/test_auth.py b/src/allmydata/test/test_auth.py
index b61531b1b31a043926c318f92f47d29f0d638a81..2b52f00af1d711f4e558d23f26944e2f37f2694e 100644 (file)
@@ -5,6 +5,8 @@ from twisted.conch import error as conch_error
 from twisted.conch.ssh import keys
 
 from allmydata.frontends import auth
+from allmydata.util.fileutil import abspath_expanduser_unicode
+
 
 DUMMY_KEY = keys.Key.fromString("""\
 -----BEGIN RSA PRIVATE KEY-----
@@ -37,7 +39,8 @@ class AccountFileCheckerKeyTests(unittest.TestCase):
     def setUp(self):
         self.account_file = filepath.FilePath(self.mktemp())
         self.account_file.setContent(DUMMY_ACCOUNTS)
-        self.checker = auth.AccountFileChecker(None, self.account_file.path)
+        abspath = abspath_expanduser_unicode(unicode(self.account_file.path))
+        self.checker = auth.AccountFileChecker(None, abspath)
 
     def test_unknown_user(self):
         """
diff --git a/src/allmydata/test/test_cli.py b/src/allmydata/test/test_cli.py
index e415707d4390a40a2b5ffaea43e2fe942355a5c1..f672ea12095df7f241102ed482e08df51ce68e07 100644 (file)
@@ -39,7 +39,7 @@ from twisted.python import usage
 
 from allmydata.util.assertutil import precondition
 from allmydata.util.encodingutil import listdir_unicode, unicode_platform, \
-    quote_output, quote_local_unicode_path, get_io_encoding, get_filesystem_encoding, \
+    quote_output, get_io_encoding, get_filesystem_encoding, \
     unicode_to_output, unicode_to_argv, to_str
 from allmydata.util.fileutil import abspath_expanduser_unicode
 
diff --git a/src/allmydata/test/test_encodingutil.py b/src/allmydata/test/test_encodingutil.py
index b37e2f7294473b0c2c1fdfc4f64f8ee7f4712be0..926c3659dfd933d52a6ff82c9479c4ff3ff26dfb 100644 (file)
@@ -61,7 +61,7 @@ from mock import patch
 import os, sys, locale
 
 from allmydata.test.common_util import ReallyEqualMixin
-from allmydata.util import encodingutil
+from allmydata.util import encodingutil, fileutil
 from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
     unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
     unicode_platform, listdir_unicode, FilenameEncodingError, get_io_encoding, \
@@ -275,8 +275,8 @@ class StdlibUnicode(unittest.TestCase):
         # to lumiere_nfc (on Mac OS X, it will be the NFD equivalent).
         self.failUnlessIn(lumiere_nfc + ".txt", set([normalize(fname) for fname in filenames]))
 
-        expanded = os.path.expanduser("~/" + lumiere_nfc)
-        self.failIfIn("~", expanded)
+        expanded = fileutil.expanduser(u"~/" + lumiere_nfc)
+        self.failIfIn(u"~", expanded)
         self.failUnless(expanded.endswith(lumiere_nfc), expanded)
 
     def test_open_unrepresentable(self):
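The updated test calls fileutil.expanduser rather than os.path.expanduser. A hedged sketch of the behaviour the assertions rely on (unicode in, unicode out; any Windows-specific resolution in the real helper is omitted):

import os

def expanduser(path_u):
    # Assumed behaviour: a unicode-aware '~' expansion; on POSIX presumably
    # equivalent to os.path.expanduser applied to a unicode path.
    assert isinstance(path_u, unicode), path_u
    return os.path.expanduser(path_u)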
diff --git a/src/allmydata/test/test_mutable.py b/src/allmydata/test/test_mutable.py
index fb39af72e15a311576ddcbe46686df36ab40e876..26c7f244d3af7de9a8b01203780941c856e3ae82 100644 (file)
@@ -1,10 +1,13 @@
 import os, re, base64
 from cStringIO import StringIO
+
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
+
 from allmydata import uri, client
 from allmydata.nodemaker import NodeMaker
 from allmydata.util import base32, consumer, fileutil, mathutil
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.hashutil import tagged_hash, ssk_writekey_hash, \
      ssk_pubkey_fingerprint_hash
 from allmydata.util.consumer import MemoryConsumer
@@ -3110,7 +3113,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
             fso = debug.FindSharesOptions()
             storage_index = base32.b2a(n.get_storage_index())
             fso.si_s = storage_index
-            fso.nodedirs = [unicode(os.path.dirname(os.path.abspath(storedir)))
+            fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(unicode(storedir)))
                             for (i,ss,storedir)
                             in self.iterate_servers()]
             fso.stdout = StringIO()
diff --git a/src/allmydata/util/fileutil.py b/src/allmydata/util/fileutil.py
index 2e113fa83f79eb738fa987675b6d505a51608748..466361d992e1cdd51509c78275e1fadf8444c338 100644 (file)
@@ -263,9 +263,11 @@ def read(path):
     finally:
         rf.close()
 
-def put_file(pathname, inf):
+def put_file(path, inf):
+    precondition_abspath(path)
+
     # TODO: create temporary file and move into place?
-    outf = open(os.path.expanduser(pathname), "wb")
+    outf = open(path, "wb")
     try:
         while True:
             data = inf.read(32768)
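With put_file asserting precondition_abspath, callers are expected to resolve the destination path first. A hypothetical usage sketch (the file names below are examples, not taken from this diff):

from allmydata.util.fileutil import abspath_expanduser_unicode, put_file

local_path = abspath_expanduser_unicode(u"~/tahoe-output.bin")  # hypothetical destination
inf = open("/tmp/tahoe-input.bin", "rb")                        # hypothetical source
try:
    put_file(local_path, inf)
finally:
    inf.close()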