from allmydata.immutable.offloaded import Helper
from allmydata.control import ControlServer
from allmydata.introducer.client import IntroducerClient
-from allmydata.util import hashutil, base32, pollmixin, log, keyutil
-from allmydata.util.encodingutil import get_filesystem_encoding
+from allmydata.util import hashutil, base32, pollmixin, log, keyutil, idlib
+from allmydata.util.encodingutil import get_filesystem_encoding, \
+ from_utf8_or_none
+from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.abbreviate import parse_abbreviated_size
from allmydata.util.time_format import parse_duration, parse_date
from allmydata.stats import StatsProvider
PORTNUMFILE = "client.port"
STOREDIR = 'storage'
NODETYPE = "client"
- SUICIDE_PREVENTION_HOTLINE_FILE = "suicide_prevention_hotline"
+ EXIT_TRIGGER_FILE = "exit_trigger"
# This means that if a storage server treats me as though I were a
# 1.0.0 storage client, it will work as they expect.
OLDEST_SUPPORTED_VERSION = "1.0.0"
- # this is a tuple of (needed, desired, total, max_segment_size). 'needed'
+ # This is a dictionary of (needed, desired, total, max_segment_size). 'needed'
# is the number of shares required to reconstruct a file. 'desired' means
# that we will abort an upload unless we can allocate space for at least
# this many. 'total' is the total number of shares created by encoding.
node.Node.__init__(self, basedir)
self.started_timestamp = time.time()
self.logSource="Client"
- self.DEFAULT_ENCODING_PARAMETERS = self.DEFAULT_ENCODING_PARAMETERS.copy()
+ self.encoding_params = self.DEFAULT_ENCODING_PARAMETERS.copy()
self.init_introducer_client()
self.init_stats_provider()
self.init_secrets()
+ self.init_node_key()
self.init_storage()
self.init_control()
self.helper = None
self.init_sftp_server()
self.init_drop_uploader()
- hotline_file = os.path.join(self.basedir,
- self.SUICIDE_PREVENTION_HOTLINE_FILE)
- if os.path.exists(hotline_file):
- age = time.time() - os.stat(hotline_file)[stat.ST_MTIME]
- self.log("hotline file noticed (%ds old), starting timer" % age)
- hotline = TimerService(1.0, self._check_hotline, hotline_file)
- hotline.setServiceParent(self)
+ # If the node sees an exit_trigger file, it will poll every second to see
+ # whether the file still exists, and what its mtime is. If the file does not
+ # exist or has not been modified for a given timeout, the node will exit.
+ exit_trigger_file = os.path.join(self.basedir,
+ self.EXIT_TRIGGER_FILE)
+ if os.path.exists(exit_trigger_file):
+ age = time.time() - os.stat(exit_trigger_file)[stat.ST_MTIME]
+ self.log("%s file noticed (%ds old), starting timer" % (self.EXIT_TRIGGER_FILE, age))
+ exit_trigger = TimerService(1.0, self._check_exit_trigger, exit_trigger_file)
+ exit_trigger.setServiceParent(self)
# this needs to happen last, so it can use getServiceNamed() to
# acquire references to StorageServer and other web-statusable things
if webport:
self.init_web(webport) # strports string
+ def _sequencer(self):
+ # Return a (seqnum, nonce) pair for introducer announcements (passed
+ # as the sequencer callable to IntroducerClient). The sequence number
+ # is persisted in the "announcement-seqnum" config file and incremented
+ # on every call, so announcements published after a node restart
+ # supersede earlier ones. The nonce comes from _make_secret()
+ # (presumably a fresh random secret — defined elsewhere in this file).
+ seqnum_s = self.get_config_from_file("announcement-seqnum")
+ if not seqnum_s:
+ seqnum_s = "0"
+ seqnum = int(seqnum_s.strip())
+ seqnum += 1 # increment
+ self.write_config("announcement-seqnum", "%d\n" % seqnum)
+ nonce = _make_secret().strip()
+ return seqnum, nonce
+
def init_introducer_client(self):
self.introducer_furl = self.get_config("client", "introducer.furl")
ic = IntroducerClient(self.tub, self.introducer_furl,
self.nickname,
str(allmydata.__full_version__),
str(self.OLDEST_SUPPORTED_VERSION),
- self.get_app_versions())
+ self.get_app_versions(),
+ self._sequencer)
self.introducer_client = ic
# hold off on starting the IntroducerClient until our tub has been
# started, so we'll have a useful address on our RemoteReference, so
self.convergence = base32.a2b(convergence_s)
self._secret_holder = SecretHolder(lease_secret, self.convergence)
- def _maybe_create_node_key(self):
+ def init_node_key(self):
+ # Load (or lazily create) this node's Ed25519-style signing keypair
+ # via keyutil, publish the verifying key to node.pubkey, and keep the
+ # signing key on self._node_key for signing introducer announcements.
# we only create the key once. On all subsequent runs, we re-use the
# existing key
def _make_key():
sk_vs,vk_vs = keyutil.make_keypair()
return sk_vs+"\n"
- # for a while (between releases, before 1.10) this was known as
- # server.privkey, but now it lives in node.privkey. This fallback can
- # be removed after 1.10 is released.
- sk_vs = self.get_private_config("server.privkey", None)
- if not sk_vs:
- sk_vs = self.get_or_create_private_config("node.privkey", _make_key)
sk_vs = self.get_or_create_private_config("node.privkey", _make_key)
sk,vk_vs = keyutil.parse_privkey(sk_vs.strip())
self.write_config("node.pubkey", vk_vs+"\n")
- self._server_key = sk
+ self._node_key = sk
+
+ def get_long_nodeid(self):
+ # Return the pubkey-based node identifier: "v0-" plus the base32
+ # encoding of this node's verifying-key bytes.
+ # this matches what IServer.get_longname() says about us elsewhere
+ vk_bytes = self._node_key.get_verifying_key_bytes()
+ return "v0-"+base32.b2a(vk_bytes)
+
+ def get_long_tubid(self):
+ # Return the string form of this node's Foolscap tub id
+ # (self.nodeid rendered via idlib.nodeid_b2a).
+ return idlib.nodeid_b2a(self.nodeid)
def _init_permutation_seed(self, ss):
seed = self.get_config_from_file("permutation-seed")
else:
# otherwise, we're free to use the more natural seed of our
# pubkey-based serverid
- vk_bytes = self._server_key.get_verifying_key_bytes()
+ vk_bytes = self._node_key.get_verifying_key_bytes()
seed = base32.b2a(vk_bytes)
self.write_config("permutation-seed", seed+"\n")
return seed.strip()
return
readonly = self.get_config("storage", "readonly", False, boolean=True)
- self._maybe_create_node_key()
-
storedir = os.path.join(self.basedir, self.STOREDIR)
data = self.get_config("storage", "reserved_space", None)
- reserved = None
try:
reserved = parse_abbreviated_size(data)
except ValueError:
log.msg("[storage]reserved_space= contains unparseable value %s"
% data)
+ raise
if reserved is None:
reserved = 0
discard = self.get_config("storage", "debug_discard", False,
ann = {"anonymous-storage-FURL": furl,
"permutation-seed-base32": self._init_permutation_seed(ss),
}
- self.introducer_client.publish("storage", ann, self._server_key)
+ self.introducer_client.publish("storage", ann, self._node_key)
d.addCallback(_publish)
d.addErrback(log.err, facility="tahoe.init",
level=log.BAD, umid="aLGBKw")
def init_client(self):
helper_furl = self.get_config("client", "helper.furl", None)
- DEP = self.DEFAULT_ENCODING_PARAMETERS
+ if helper_furl in ("None", ""):
+ helper_furl = None
+
+ DEP = self.encoding_params
DEP["k"] = int(self.get_config("client", "shares.needed", DEP["k"]))
DEP["n"] = int(self.get_config("client", "shares.total", DEP["n"]))
DEP["happy"] = int(self.get_config("client", "shares.happy", DEP["happy"]))
def init_client_storage_broker(self):
# create a StorageFarmBroker object, for use by Uploader/Downloader
# (and everybody else who wants to use storage servers)
- sb = storage_client.StorageFarmBroker(self.tub, permute_peers=True)
+ ps = self.get_config("client", "peers.preferred", "").split(",")
+ preferred_peers = tuple([p.strip() for p in ps if p != ""])
+ sb = storage_client.StorageFarmBroker(self.tub, permute_peers=True, preferred_peers=preferred_peers)
self.storage_broker = sb
# load static server specifications from tahoe.cfg, if any.
from allmydata.webish import WebishServer
nodeurl_path = os.path.join(self.basedir, "node.url")
- staticdir = self.get_config("node", "web.static", "public_html")
- staticdir = os.path.expanduser(staticdir)
+ staticdir_config = self.get_config("node", "web.static", "public_html").decode("utf-8")
+ staticdir = abspath_expanduser_unicode(staticdir_config, base=self.basedir)
ws = WebishServer(self, webport, nodeurl_path, staticdir)
self.add_service(ws)
def init_ftp_server(self):
if self.get_config("ftpd", "enabled", False, boolean=True):
- accountfile = self.get_config("ftpd", "accounts.file", None)
+ accountfile = from_utf8_or_none(
+ self.get_config("ftpd", "accounts.file", None))
+ if accountfile:
+ accountfile = abspath_expanduser_unicode(accountfile, base=self.basedir)
accounturl = self.get_config("ftpd", "accounts.url", None)
ftp_portstr = self.get_config("ftpd", "port", "8021")
def init_sftp_server(self):
if self.get_config("sftpd", "enabled", False, boolean=True):
- accountfile = self.get_config("sftpd", "accounts.file", None)
+ accountfile = from_utf8_or_none(
+ self.get_config("sftpd", "accounts.file", None))
+ if accountfile:
+ accountfile = abspath_expanduser_unicode(accountfile, base=self.basedir)
accounturl = self.get_config("sftpd", "accounts.url", None)
sftp_portstr = self.get_config("sftpd", "port", "8022")
- pubkey_file = self.get_config("sftpd", "host_pubkey_file")
- privkey_file = self.get_config("sftpd", "host_privkey_file")
+ pubkey_file = from_utf8_or_none(self.get_config("sftpd", "host_pubkey_file"))
+ privkey_file = from_utf8_or_none(self.get_config("sftpd", "host_privkey_file"))
from allmydata.frontends import sftpd
s = sftpd.SFTPServer(self, accountfile, accounturl,
except Exception, e:
self.log("couldn't start drop-uploader: %r", args=(e,))
- def _check_hotline(self, hotline_file):
- if os.path.exists(hotline_file):
- mtime = os.stat(hotline_file)[stat.ST_MTIME]
+ def _check_exit_trigger(self, exit_trigger_file):
+ # Called every second by the TimerService set up in __init__ when an
+ # exit_trigger file is present at startup. If the file has vanished,
+ # or its mtime is more than 120 seconds old, stop the reactor (i.e.
+ # shut the node down); otherwise do nothing and wait for the next poll.
+ if os.path.exists(exit_trigger_file):
+ mtime = os.stat(exit_trigger_file)[stat.ST_MTIME]
if mtime > time.time() - 120.0:
+ # file was touched recently enough: keep running
return
else:
- self.log("hotline file too old, shutting down")
+ self.log("%s file too old, shutting down" % (self.EXIT_TRIGGER_FILE,))
else:
- self.log("hotline file missing, shutting down")
+ self.log("%s file missing, shutting down" % (self.EXIT_TRIGGER_FILE,))
reactor.stop()
def get_encoding_parameters(self):
+ # Return this client's erasure-coding parameter dict (keys include
+ # "k", "happy", "n", and max segment size), a per-instance copy of
+ # DEFAULT_ENCODING_PARAMETERS possibly overridden by tahoe.cfg.
- return self.DEFAULT_ENCODING_PARAMETERS
+ return self.encoding_params
def connected_to_introducer(self):
if self.introducer_client: