]> git.rkrishnan.org Git - tahoe-lafs/tahoe-lafs.git/blob - src/allmydata/client.py
mutable WIP: clean up status handling, shrink the code a lot, improve test coverage
[tahoe-lafs/tahoe-lafs.git] / src / allmydata / client.py
1
2 import os, stat, time, re
3 from allmydata.interfaces import RIStorageServer
4 from allmydata import node
5
6 from zope.interface import implements
7 from twisted.internet import reactor
8 from twisted.application.internet import TimerService
9 from foolscap import Referenceable
10 from foolscap.logging import log
11 from pycryptopp.publickey import rsa
12
13 import allmydata
14 from allmydata.storage import StorageServer
15 from allmydata.upload import Uploader
16 from allmydata.download import Downloader
17 from allmydata.checker import Checker
18 from allmydata.offloaded import Helper
19 from allmydata.control import ControlServer
20 from allmydata.introducer import IntroducerClient
21 from allmydata.util import hashutil, base32, testutil
22 from allmydata.filenode import FileNode
23 from allmydata.dirnode import NewDirectoryNode
24 from allmydata.mutable.node import MutableFileNode, MutableWatcher
25 from allmydata.stats import StatsProvider
26 from allmydata.interfaces import IURI, INewDirectoryURI, IStatsProducer, \
27      IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, RIStubClient
28
29 KiB=1024
30 MiB=1024*KiB
31 GiB=1024*MiB
32 TiB=1024*GiB
33 PiB=1024*TiB
34
class StubClient(Referenceable):
    """An empty Referenceable, published under the "stub_client" service
    name so that the introducer can count connected clients and observe
    their advertised versions (see Client.init_client)."""
    implements(RIStubClient)
37
def _make_secret():
    """Return a fresh random secret: base32-encoded random bytes plus a
    trailing newline, ready to be written to a private config file."""
    raw = os.urandom(hashutil.CRYPTO_VAL_SIZE)
    return "%s\n" % base32.b2a(raw)
40
41 class Client(node.Node, testutil.PollMixin):
42     implements(IStatsProducer)
43
44     PORTNUMFILE = "client.port"
45     STOREDIR = 'storage'
46     NODETYPE = "client"
47     SUICIDE_PREVENTION_HOTLINE_FILE = "suicide_prevention_hotline"
48
49     # we're pretty narrow-minded right now
50     OLDEST_SUPPORTED_VERSION = allmydata.__version__
51
52     # this is a tuple of (needed, desired, total, max_segment_size). 'needed'
53     # is the number of shares required to reconstruct a file. 'desired' means
54     # that we will abort an upload unless we can allocate space for at least
55     # this many. 'total' is the total number of shares created by encoding.
56     # If everybody has room then this is is how many we will upload.
57     DEFAULT_ENCODING_PARAMETERS = {"k": 3,
58                                    "happy": 7,
59                                    "n": 10,
60                                    "max_segment_size": 128*KiB,
61                                    }
62
63     def __init__(self, basedir="."):
64         node.Node.__init__(self, basedir)
65         self.started_timestamp = time.time()
66         self.logSource="Client"
67         self.nickname = self.get_config("nickname")
68         if self.nickname is None:
69             self.nickname = "<unspecified>"
70         self.init_introducer_client()
71         self.init_stats_provider()
72         self.init_lease_secret()
73         self.init_storage()
74         self.init_control()
75         run_helper = self.get_config("run_helper")
76         if run_helper:
77             self.init_helper()
78         self.init_client()
79         self._key_generator = None
80         key_gen_furl = self.get_config('key_generator.furl')
81         if key_gen_furl:
82             self.init_key_gen(key_gen_furl)
83         # ControlServer and Helper are attached after Tub startup
84
85         hotline_file = os.path.join(self.basedir,
86                                     self.SUICIDE_PREVENTION_HOTLINE_FILE)
87         if os.path.exists(hotline_file):
88             age = time.time() - os.stat(hotline_file)[stat.ST_MTIME]
89             self.log("hotline file noticed (%ds old), starting timer" % age)
90             hotline = TimerService(1.0, self._check_hotline, hotline_file)
91             hotline.setServiceParent(self)
92
93         webport = self.get_config("webport")
94         if webport:
95             self.init_web(webport) # strports string
96
97     def init_introducer_client(self):
98         self.introducer_furl = self.get_config("introducer.furl", required=True)
99         ic = IntroducerClient(self.tub, self.introducer_furl,
100                               self.nickname,
101                               str(allmydata.__version__),
102                               str(self.OLDEST_SUPPORTED_VERSION))
103         self.introducer_client = ic
104         ic.setServiceParent(self)
105         # nodes that want to upload and download will need storage servers
106         ic.subscribe_to("storage")
107
108     def init_stats_provider(self):
109         gatherer_furl = self.get_config('stats_gatherer.furl')
110         if gatherer_furl:
111             self.stats_provider = StatsProvider(self, gatherer_furl)
112             self.add_service(self.stats_provider)
113             self.stats_provider.register_producer(self)
114         else:
115             self.stats_provider = None
116
117     def get_stats(self):
118         return { 'node.uptime': time.time() - self.started_timestamp }
119
120     def init_lease_secret(self):
121         secret_s = self.get_or_create_private_config("secret", _make_secret)
122         self._lease_secret = base32.a2b(secret_s)
123
124     def init_storage(self):
125         # should we run a storage server (and publish it for others to use)?
126         provide_storage = (self.get_config("no_storage") is None)
127         if not provide_storage:
128             return
129         readonly_storage = (self.get_config("readonly_storage") is not None)
130
131         storedir = os.path.join(self.basedir, self.STOREDIR)
132         sizelimit = None
133
134         data = self.get_config("sizelimit")
135         if data:
136             m = re.match(r"^(\d+)([kKmMgG]?[bB]?)$", data)
137             if not m:
138                 log.msg("SIZELIMIT_FILE contains unparseable value %s" % data)
139             else:
140                 number, suffix = m.groups()
141                 suffix = suffix.upper()
142                 if suffix.endswith("B"):
143                     suffix = suffix[:-1]
144                 multiplier = {"": 1,
145                               "K": 1000,
146                               "M": 1000 * 1000,
147                               "G": 1000 * 1000 * 1000,
148                               }[suffix]
149                 sizelimit = int(number) * multiplier
150         discard_storage = self.get_config("debug_discard_storage") is not None
151         ss = StorageServer(storedir, sizelimit,
152                            discard_storage, readonly_storage,
153                            self.stats_provider)
154         self.add_service(ss)
155         d = self.when_tub_ready()
156         # we can't do registerReference until the Tub is ready
157         def _publish(res):
158             furl_file = os.path.join(self.basedir, "private", "storage.furl")
159             furl = self.tub.registerReference(ss, furlFile=furl_file)
160             ri_name = RIStorageServer.__remote_name__
161             self.introducer_client.publish(furl, "storage", ri_name)
162         d.addCallback(_publish)
163         d.addErrback(log.err, facility="tahoe.init", level=log.BAD)
164
165     def init_client(self):
166         helper_furl = self.get_config("helper.furl")
167         convergence_s = self.get_or_create_private_config('convergence', _make_secret)
168         self.convergence = base32.a2b(convergence_s)
169         self.add_service(Uploader(helper_furl, self.stats_provider))
170         self.add_service(Downloader(self.stats_provider))
171         self.add_service(Checker())
172         self.add_service(MutableWatcher(self.stats_provider))
173         def _publish(res):
174             # we publish an empty object so that the introducer can count how
175             # many clients are connected and see what versions they're
176             # running.
177             sc = StubClient()
178             furl = self.tub.registerReference(sc)
179             ri_name = RIStubClient.__remote_name__
180             self.introducer_client.publish(furl, "stub_client", ri_name)
181         d = self.when_tub_ready()
182         d.addCallback(_publish)
183         d.addErrback(log.err, facility="tahoe.init", level=log.BAD)
184
185     def init_control(self):
186         d = self.when_tub_ready()
187         def _publish(res):
188             c = ControlServer()
189             c.setServiceParent(self)
190             control_url = self.tub.registerReference(c)
191             self.write_private_config("control.furl", control_url + "\n")
192         d.addCallback(_publish)
193         d.addErrback(log.err, facility="tahoe.init", level=log.BAD)
194
195     def init_helper(self):
196         d = self.when_tub_ready()
197         def _publish(self):
198             h = Helper(os.path.join(self.basedir, "helper"), self.stats_provider)
199             h.setServiceParent(self)
200             # TODO: this is confusing. BASEDIR/private/helper.furl is created
201             # by the helper. BASEDIR/helper.furl is consumed by the client
202             # who wants to use the helper. I like having the filename be the
203             # same, since that makes 'cp' work smoothly, but the difference
204             # between config inputs and generated outputs is hard to see.
205             helper_furlfile = os.path.join(self.basedir,
206                                            "private", "helper.furl")
207             self.tub.registerReference(h, furlFile=helper_furlfile)
208         d.addCallback(_publish)
209         d.addErrback(log.err, facility="tahoe.init", level=log.BAD)
210
211     def init_key_gen(self, key_gen_furl):
212         d = self.when_tub_ready()
213         def _subscribe(self):
214             self.tub.connectTo(key_gen_furl, self._got_key_generator)
215         d.addCallback(_subscribe)
216         d.addErrback(log.err, facility="tahoe.init", level=log.BAD)
217
218     def _got_key_generator(self, key_generator):
219         self._key_generator = key_generator
220         key_generator.notifyOnDisconnect(self._lost_key_generator)
221
222     def _lost_key_generator(self):
223         self._key_generator = None
224
225     def init_web(self, webport):
226         self.log("init_web(webport=%s)", args=(webport,))
227
228         from allmydata.webish import WebishServer
229         nodeurl_path = os.path.join(self.basedir, "node.url")
230         ws = WebishServer(webport, nodeurl_path)
231         if self.get_config("webport_allow_localfile") is not None:
232             ws.allow_local_access(True)
233         self.add_service(ws)
234
235     def _check_hotline(self, hotline_file):
236         if os.path.exists(hotline_file):
237             mtime = os.stat(hotline_file)[stat.ST_MTIME]
238             if mtime > time.time() - 20.0:
239                 return
240             else:
241                 self.log("hotline file too old, shutting down")
242         else:
243             self.log("hotline file missing, shutting down")
244         reactor.stop()
245
246     def get_all_peerids(self):
247         return self.introducer_client.get_all_peerids()
248
249     def get_permuted_peers(self, service_name, key):
250         """
251         @return: list of (peerid, connection,)
252         """
253         assert isinstance(service_name, str)
254         assert isinstance(key, str)
255         return self.introducer_client.get_permuted_peers(service_name, key)
256
257     def get_encoding_parameters(self):
258         return self.DEFAULT_ENCODING_PARAMETERS
259
260     def connected_to_introducer(self):
261         if self.introducer_client:
262             return self.introducer_client.connected_to_introducer()
263         return False
264
265     def get_renewal_secret(self):
266         return hashutil.my_renewal_secret_hash(self._lease_secret)
267
268     def get_cancel_secret(self):
269         return hashutil.my_cancel_secret_hash(self._lease_secret)
270
271     def debug_wait_for_client_connections(self, num_clients):
272         """Return a Deferred that fires (with None) when we have connections
273         to the given number of peers. Useful for tests that set up a
274         temporary test network and need to know when it is safe to proceed
275         with an upload or download."""
276         def _check():
277             current_clients = list(self.get_all_peerids())
278             return len(current_clients) >= num_clients
279         d = self.poll(_check, 0.5)
280         d.addCallback(lambda res: None)
281         return d
282
283
284     # these four methods are the primitives for creating filenodes and
285     # dirnodes. The first takes a URI and produces a filenode or (new-style)
286     # dirnode. The other three create brand-new filenodes/dirnodes.
287
288     def create_node_from_uri(self, u):
289         # this returns synchronously.
290         u = IURI(u)
291         if IReadonlyNewDirectoryURI.providedBy(u):
292             # new-style read-only dirnodes
293             return NewDirectoryNode(self).init_from_uri(u)
294         if INewDirectoryURI.providedBy(u):
295             # new-style dirnodes
296             return NewDirectoryNode(self).init_from_uri(u)
297         if IFileURI.providedBy(u):
298             # CHK
299             return FileNode(u, self)
300         assert IMutableFileURI.providedBy(u), u
301         return MutableFileNode(self).init_from_uri(u)
302
303     def notify_publish(self, publish_status):
304         self.getServiceNamed("mutable-watcher").notify_publish(publish_status)
305     def notify_retrieve(self, retrieve_status):
306         self.getServiceNamed("mutable-watcher").notify_retrieve(retrieve_status)
307     def notify_mapupdate(self, update_status):
308         self.getServiceNamed("mutable-watcher").notify_mapupdate(update_status)
309
310     def create_empty_dirnode(self):
311         n = NewDirectoryNode(self)
312         d = n.create(self._generate_pubprivkeys)
313         d.addCallback(lambda res: n)
314         return d
315
316     def create_mutable_file(self, contents=""):
317         n = MutableFileNode(self)
318         d = n.create(contents, self._generate_pubprivkeys)
319         d.addCallback(lambda res: n)
320         return d
321
322     def _generate_pubprivkeys(self, key_size):
323         if self._key_generator:
324             d = self._key_generator.callRemote('get_rsa_key_pair', key_size)
325             def make_key_objs((verifying_key, signing_key)):
326                 v = rsa.create_verifying_key_from_string(verifying_key)
327                 s = rsa.create_signing_key_from_string(signing_key)
328                 return v, s
329             d.addCallback(make_key_objs)
330             return d
331         else:
332             # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
333             # secs
334             signer = rsa.generate(key_size)
335             verifier = signer.get_verifying_key()
336             return verifier, signer
337
338     def upload(self, uploadable):
339         uploader = self.getServiceNamed("uploader")
340         return uploader.upload(uploadable)
341
342
343     def list_all_upload_statuses(self):
344         uploader = self.getServiceNamed("uploader")
345         return uploader.list_all_upload_statuses()
346
347     def list_all_download_statuses(self):
348         downloader = self.getServiceNamed("downloader")
349         return downloader.list_all_download_statuses()
350
351     def list_all_mapupdate_statuses(self):
352         watcher = self.getServiceNamed("mutable-watcher")
353         return watcher.list_all_mapupdate_statuses()
354     def list_all_publish_statuses(self):
355         watcher = self.getServiceNamed("mutable-watcher")
356         return watcher.list_all_publish_statuses()
357     def list_all_retrieve_statuses(self):
358         watcher = self.getServiceNamed("mutable-watcher")
359         return watcher.list_all_retrieve_statuses()
360
361     def list_all_helper_statuses(self):
362         try:
363             helper = self.getServiceNamed("helper")
364         except KeyError:
365             return []
366         return helper.get_all_upload_statuses()
367