Add simple auth-token to get JSON data
diff --git a/src/allmydata/client.py b/src/allmydata/client.py
index a1e99d9d4d371dcf380465f3f1d819dcad8b6160..12ebb148aa2672f0f9d988991110956b69a24969 100644
--- a/src/allmydata/client.py
+++ b/src/allmydata/client.py
@@ -1,5 +1,6 @@
 import os, stat, time, weakref
 from allmydata import node
+from base64 import urlsafe_b64encode
 
 from zope.interface import implements
 from twisted.internet import reactor, defer
@@ -14,8 +15,10 @@ from allmydata.immutable.upload import Uploader
 from allmydata.immutable.offloaded import Helper
 from allmydata.control import ControlServer
 from allmydata.introducer.client import IntroducerClient
-from allmydata.util import hashutil, base32, pollmixin, log, keyutil
-from allmydata.util.encodingutil import get_filesystem_encoding
+from allmydata.util import hashutil, base32, pollmixin, log, keyutil, idlib
+from allmydata.util.encodingutil import get_filesystem_encoding, \
+     from_utf8_or_none
+from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.abbreviate import parse_abbreviated_size
 from allmydata.util.time_format import parse_duration, parse_date
 from allmydata.stats import StatsProvider
@@ -109,13 +112,13 @@ class Client(node.Node, pollmixin.PollMixin):
     PORTNUMFILE = "client.port"
     STOREDIR = 'storage'
     NODETYPE = "client"
-    SUICIDE_PREVENTION_HOTLINE_FILE = "suicide_prevention_hotline"
+    EXIT_TRIGGER_FILE = "exit_trigger"
 
     # This means that if a storage server treats me as though I were a
     # 1.0.0 storage client, it will work as they expect.
     OLDEST_SUPPORTED_VERSION = "1.0.0"
 
-    # this is a tuple of (needed, desired, total, max_segment_size). 'needed'
+    # This is a dictionary of (needed, desired, total, max_segment_size). 'needed'
     # is the number of shares required to reconstruct a file. 'desired' means
     # that we will abort an upload unless we can allocate space for at least
     # this many. 'total' is the total number of shares created by encoding.
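
For reference, 'needed', 'desired', and 'total' are stored under the keys "k",
"happy", and "n" (see the shares.* handling further down in this diff). A
sketch of the dictionary's shape, assuming the long-standing 3-of-10 defaults;
the exact max_segment_size constant is defined elsewhere in the tree:

    # illustrative shape of DEFAULT_ENCODING_PARAMETERS (values assumed)
    DEFAULT_ENCODING_PARAMETERS = {
        "k": 3,                          # 'needed': shares required to reconstruct
        "happy": 7,                      # 'desired': abort upload below this many servers
        "n": 10,                         # 'total': shares created by encoding
        "max_segment_size": 128 * 1024,  # upper bound on erasure-coding segment size
    }
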
@@ -127,13 +130,16 @@ class Client(node.Node, pollmixin.PollMixin):
                                    }
 
     def __init__(self, basedir="."):
+        #print "Client.__init__(%r)" % (basedir,)
         node.Node.__init__(self, basedir)
+        self.connected_enough_d = defer.Deferred()
         self.started_timestamp = time.time()
         self.logSource="Client"
-        self.DEFAULT_ENCODING_PARAMETERS = self.DEFAULT_ENCODING_PARAMETERS.copy()
+        self.encoding_params = self.DEFAULT_ENCODING_PARAMETERS.copy()
         self.init_introducer_client()
         self.init_stats_provider()
         self.init_secrets()
+        self.init_node_key()
         self.init_storage()
         self.init_control()
         self.helper = None
@@ -147,15 +153,18 @@ class Client(node.Node, pollmixin.PollMixin):
         # ControlServer and Helper are attached after Tub startup
         self.init_ftp_server()
         self.init_sftp_server()
-        self.init_drop_uploader()
-
-        hotline_file = os.path.join(self.basedir,
-                                    self.SUICIDE_PREVENTION_HOTLINE_FILE)
-        if os.path.exists(hotline_file):
-            age = time.time() - os.stat(hotline_file)[stat.ST_MTIME]
-            self.log("hotline file noticed (%ds old), starting timer" % age)
-            hotline = TimerService(1.0, self._check_hotline, hotline_file)
-            hotline.setServiceParent(self)
+        self.init_magic_folder()
+
+        # If the node sees an exit_trigger file, it will poll every second to
+        # check that file's mtime. If the file does not exist, or has not been
+        # touched within the last 120 seconds, the node will exit.
+        exit_trigger_file = os.path.join(self.basedir,
+                                         self.EXIT_TRIGGER_FILE)
+        if os.path.exists(exit_trigger_file):
+            age = time.time() - os.stat(exit_trigger_file)[stat.ST_MTIME]
+            self.log("%s file noticed (%ds old), starting timer" % (self.EXIT_TRIGGER_FILE, age))
+            exit_trigger = TimerService(1.0, self._check_exit_trigger, exit_trigger_file)
+            exit_trigger.setServiceParent(self)
 
         # this needs to happen last, so it can use getServiceNamed() to
         # acquire references to StorageServer and other web-statusable things
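
The exit trigger exists for test harnesses: a supervising process keeps the
node alive by touching the file, and the node shuts itself down if its
supervisor dies. A minimal keep-alive sketch (the basedir and the
harness_is_running() predicate are hypothetical; the 120-second grace period
comes from _check_exit_trigger, below):

    import os, time

    trigger = os.path.join("/home/amy/.tahoe", "exit_trigger")  # assumed basedir
    open(trigger, "ab").close()      # create the file before starting the node
    while harness_is_running():      # hypothetical predicate supplied by the harness
        os.utime(trigger, None)      # bump the mtime so the node stays up
        time.sleep(30)               # comfortably inside the 120s window
    # once the harness stops touching the file, the node's TimerService
    # notices the stale mtime and calls reactor.stop()
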
@@ -209,22 +218,25 @@ class Client(node.Node, pollmixin.PollMixin):
         self.convergence = base32.a2b(convergence_s)
         self._secret_holder = SecretHolder(lease_secret, self.convergence)
 
-    def _maybe_create_node_key(self):
+    def init_node_key(self):
         # we only create the key once. On all subsequent runs, we re-use the
         # existing key
         def _make_key():
             sk_vs,vk_vs = keyutil.make_keypair()
             return sk_vs+"\n"
-        # for a while (between releases, before 1.10) this was known as
-        # server.privkey, but now it lives in node.privkey. This fallback can
-        # be removed after 1.10 is released.
-        sk_vs = self.get_private_config("server.privkey", None)
-        if not sk_vs:
-            sk_vs = self.get_or_create_private_config("node.privkey", _make_key)
+        sk_vs = self.get_or_create_private_config("node.privkey", _make_key)
         sk,vk_vs = keyutil.parse_privkey(sk_vs.strip())
         self.write_config("node.pubkey", vk_vs+"\n")
         self._node_key = sk
 
+    def get_long_nodeid(self):
+        # this matches what IServer.get_longname() says about us elsewhere
+        vk_bytes = self._node_key.get_verifying_key_bytes()
+        return "v0-"+base32.b2a(vk_bytes)
+
+    def get_long_tubid(self):
+        return idlib.nodeid_b2a(self.nodeid)
+
     def _init_permutation_seed(self, ss):
         seed = self.get_config_from_file("permutation-seed")
         if not seed:
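
keyutil wraps an ed25519 keypair in versioned ASCII strings, so node.pubkey
and the value returned by get_long_nodeid() differ only by a "pub-" prefix.
A rough sketch of the relationship, assuming keyutil's "priv-v0-..." /
"pub-v0-..." string formats:

    from allmydata.util import keyutil, base32

    sk_vs, vk_vs = keyutil.make_keypair()   # "priv-v0-<base32>", "pub-v0-<base32>"
    sk, vk_vs2 = keyutil.parse_privkey(sk_vs.strip())
    assert vk_vs2 == vk_vs                  # the pubkey is re-derivable from the privkey

    # what get_long_nodeid() returns: "v0-" plus the base32 pubkey bytes,
    # i.e. vk_vs without its "pub-" prefix (assumed format)
    long_nodeid = "v0-" + base32.b2a(sk.get_verifying_key_bytes())
    assert "pub-" + long_nodeid == vk_vs
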
@@ -252,8 +264,6 @@ class Client(node.Node, pollmixin.PollMixin):
             return
         readonly = self.get_config("storage", "readonly", False, boolean=True)
 
-        self._maybe_create_node_key()
-
         storedir = os.path.join(self.basedir, self.STOREDIR)
 
         data = self.get_config("storage", "reserved_space", None)
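
The reserved_space string read here is later fed through
parse_abbreviated_size (imported near the top of this diff), which accepts
human-friendly size suffixes. Expected behavior, hedged from the abbreviate
module's conventions:

    from allmydata.util.abbreviate import parse_abbreviated_size

    parse_abbreviated_size("1G")      # 1000000000 (decimal suffixes are powers of 10)
    parse_abbreviated_size("100MiB")  # 104857600  (binary suffixes are powers of 2)
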
@@ -320,11 +330,14 @@ class Client(node.Node, pollmixin.PollMixin):
         if helper_furl in ("None", ""):
             helper_furl = None
 
-        DEP = self.DEFAULT_ENCODING_PARAMETERS
+        DEP = self.encoding_params
         DEP["k"] = int(self.get_config("client", "shares.needed", DEP["k"]))
         DEP["n"] = int(self.get_config("client", "shares.total", DEP["n"]))
         DEP["happy"] = int(self.get_config("client", "shares.happy", DEP["happy"]))
 
+        # for the CLI to authenticate to local JSON endpoints
+        self._auth_token = self._create_or_read_auth_token()
+
         self.init_client_storage_broker()
         self.history = History(self.stats_provider)
         self.terminator = Terminator()
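
These three knobs correspond to the k/happy/n encoding parameters above and
can be overridden in tahoe.cfg; a sketch with the assumed defaults spelled
out explicitly:

    # tahoe.cfg (values shown are the assumed defaults)
    [client]
    shares.needed = 3
    shares.happy = 7
    shares.total = 10
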
@@ -334,10 +347,44 @@ class Client(node.Node, pollmixin.PollMixin):
         self.init_blacklist()
         self.init_nodemaker()
 
+    def get_auth_token(self):
+        """
+        This returns a local authentication token, which is just some
+        random data in "api_auth_token" which must be echoed to API
+        calls.
+
+        Currently only the URI '/magic' for magic-folder status; other
+        endpoints are invited to include this as well, as appropriate.
+        """
+        return self._auth_token
+
+    def _create_or_read_auth_token(self):
+        """
+        This returns the current auth-token data, possibly creating it and
+        writing 'private/api_auth_token' in the process.
+        """
+        fname = os.path.join(self.basedir, 'private', 'api_auth_token')
+        try:
+            with open(fname, 'rb') as f:
+                data = f.read()
+        except (OSError, IOError):
+            log.msg("Creating '%s'." % (fname,))
+            with open(fname, 'wb') as f:
+                data = urlsafe_b64encode(os.urandom(32))
+                f.write(data)
+        return data
+
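
A consumer (e.g. a CLI command) reads private/api_auth_token out of the
node's basedir and echoes it back with its request. A hypothetical sketch
(only the token file's location comes from the code above; the parameter
name and endpoint shape are up to each endpoint):

    import os, urllib, urllib2

    basedir = "/home/amy/.tahoe"   # assumed basedir
    token_path = os.path.join(basedir, "private", "api_auth_token")
    with open(token_path, "rb") as f:
        token = f.read().strip()

    # hypothetical POST to the magic-folder status endpoint
    body = urllib.urlencode({"token": token, "t": "json"})
    print urllib2.urlopen("http://127.0.0.1:3456/magic", body).read()
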
     def init_client_storage_broker(self):
         # create a StorageFarmBroker object, for use by Uploader/Downloader
         # (and everybody else who wants to use storage servers)
-        sb = storage_client.StorageFarmBroker(self.tub, permute_peers=True)
+        ps = self.get_config("client", "peers.preferred", "").split(",")
+        preferred_peers = tuple([p.strip() for p in ps if p != ""])
+
+        connection_threshold = min(self.encoding_params["k"],
+                                   self.encoding_params["happy"] + 1)
+
+        sb = storage_client.StorageFarmBroker(self.tub, True, connection_threshold,
+                                              self.connected_enough_d, preferred_peers=preferred_peers)
         self.storage_broker = sb
 
         # load static server specifications from tahoe.cfg, if any.
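
The broker's second argument is True, positionally replacing the old
permute_peers=True; new are the connection threshold and the
connected_enough_d Deferred created in __init__. With the default encoding
parameters the threshold works out to three connected servers, at which
point connected_enough_d fires (init_magic_folder, below, waits on it before
processing the upload queue):

    # worked example with the assumed defaults k=3, happy=7
    k, happy = 3, 7
    connection_threshold = min(k, happy + 1)   # min(3, 8) == 3
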
@@ -445,14 +492,17 @@ class Client(node.Node, pollmixin.PollMixin):
 
         from allmydata.webish import WebishServer
         nodeurl_path = os.path.join(self.basedir, "node.url")
-        staticdir = self.get_config("node", "web.static", "public_html")
-        staticdir = os.path.expanduser(staticdir)
+        staticdir_config = self.get_config("node", "web.static", "public_html").decode("utf-8")
+        staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)
         ws = WebishServer(self, webport, nodeurl_path, staticdir)
         self.add_service(ws)
 
     def init_ftp_server(self):
         if self.get_config("ftpd", "enabled", False, boolean=True):
-            accountfile = self.get_config("ftpd", "accounts.file", None)
+            accountfile = from_utf8_or_none(
+                self.get_config("ftpd", "accounts.file", None))
+            if accountfile:
+                accountfile = abspath_expanduser_unicode(accountfile, base=self.basedir)
             accounturl = self.get_config("ftpd", "accounts.url", None)
             ftp_portstr = self.get_config("ftpd", "port", "8021")
 
@@ -462,47 +512,60 @@ class Client(node.Node, pollmixin.PollMixin):
 
     def init_sftp_server(self):
         if self.get_config("sftpd", "enabled", False, boolean=True):
-            accountfile = self.get_config("sftpd", "accounts.file", None)
+            accountfile = from_utf8_or_none(
+                self.get_config("sftpd", "accounts.file", None))
+            if accountfile:
+                accountfile = abspath_expanduser_unicode(accountfile, base=self.basedir)
             accounturl = self.get_config("sftpd", "accounts.url", None)
             sftp_portstr = self.get_config("sftpd", "port", "8022")
-            pubkey_file = self.get_config("sftpd", "host_pubkey_file")
-            privkey_file = self.get_config("sftpd", "host_privkey_file")
+            pubkey_file = from_utf8_or_none(self.get_config("sftpd", "host_pubkey_file"))
+            privkey_file = from_utf8_or_none(self.get_config("sftpd", "host_privkey_file"))
 
             from allmydata.frontends import sftpd
             s = sftpd.SFTPServer(self, accountfile, accounturl,
                                  sftp_portstr, pubkey_file, privkey_file)
             s.setServiceParent(self)
 
-    def init_drop_uploader(self):
+    def init_magic_folder(self):
+        #print "init_magic_folder"
         if self.get_config("drop_upload", "enabled", False, boolean=True):
-            if self.get_config("drop_upload", "upload.dircap", None):
-                raise OldConfigOptionError("The [drop_upload]upload.dircap option is no longer supported; please "
-                                           "put the cap in a 'private/drop_upload_dircap' file, and delete this option.")
-
-            upload_dircap = self.get_or_create_private_config("drop_upload_dircap")
-            local_dir_utf8 = self.get_config("drop_upload", "local.directory")
-
-            try:
-                from allmydata.frontends import drop_upload
-                s = drop_upload.DropUploader(self, upload_dircap, local_dir_utf8)
-                s.setServiceParent(self)
-                s.startService()
-            except Exception, e:
-                self.log("couldn't start drop-uploader: %r", args=(e,))
-
-    def _check_hotline(self, hotline_file):
-        if os.path.exists(hotline_file):
-            mtime = os.stat(hotline_file)[stat.ST_MTIME]
+            raise OldConfigOptionError("The [drop_upload] section must be renamed to [magic_folder].\n"
+                                       "See docs/frontends/magic-folder.rst for more information.")
+
+        if self.get_config("magic_folder", "enabled", False, boolean=True):
+            #print "magic folder enabled"
+            upload_dircap = self.get_private_config("magic_folder_dircap")
+            collective_dircap = self.get_private_config("collective_dircap")
+
+            local_dir_config = self.get_config("magic_folder", "local.directory").decode("utf-8")
+            local_dir = abspath_expanduser_unicode(local_dir_config, base=self.basedir)
+
+            dbfile = os.path.join(self.basedir, "private", "magicfolderdb.sqlite")
+            dbfile = abspath_expanduser_unicode(dbfile)
+
+            from allmydata.frontends import magic_folder
+            umask = self.get_config("magic_folder", "download.umask", 0077)
+            s = magic_folder.MagicFolder(self, upload_dircap, collective_dircap, local_dir, dbfile, umask)
+            self._magic_folder = s
+            s.setServiceParent(self)
+            s.startService()
+
+            # start processing the upload queue when we've connected to enough servers
+            self.connected_enough_d.addCallback(lambda ign: s.ready())
+
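
Putting the pieces together, enabling magic-folder after this change takes a
tahoe.cfg section plus two cap files under private/. The section, key, and
file names below are exactly the ones init_magic_folder reads; the directory
value is illustrative (see docs/frontends/magic-folder.rst):

    # tahoe.cfg
    [magic_folder]
    enabled = True
    # local.directory is resolved against the basedir via abspath_expanduser_unicode
    local.directory = ~/Magic
    # download.umask defaults to 0077 if omitted
    #download.umask = 0077

    # plus, in the node's private/ directory:
    #   private/magic_folder_dircap
    #   private/collective_dircap
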
+    def _check_exit_trigger(self, exit_trigger_file):
+        if os.path.exists(exit_trigger_file):
+            mtime = os.stat(exit_trigger_file)[stat.ST_MTIME]
             if mtime > time.time() - 120.0:
                 return
             else:
-                self.log("hotline file too old, shutting down")
+                self.log("%s file too old, shutting down" % (self.EXIT_TRIGGER_FILE,))
         else:
-            self.log("hotline file missing, shutting down")
+            self.log("%s file missing, shutting down" % (self.EXIT_TRIGGER_FILE,))
         reactor.stop()
 
     def get_encoding_parameters(self):
-        return self.DEFAULT_ENCODING_PARAMETERS
+        return self.encoding_params
 
     def connected_to_introducer(self):
         if self.introducer_client: