# do not import any allmydata modules at this level. Do that from inside
# individual functions instead.
import sys, struct, time, os
from twisted.python import usage

class DumpOptions(usage.Options):
    """tahoe dump-share SHARE_FILENAME"""

    def parseArgs(self, filename):
        self['filename'] = filename
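
# A share file on disk holds either a mutable slot (whose container begins
# with the MutableShareFile.MAGIC string) or an immutable share. dump_share()
# sniffs that prefix to decide which parser to apply.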

def dump_share(config, out=sys.stdout, err=sys.stderr):
    from allmydata import uri, storage

    # check the version, to see if we have a mutable or immutable share
    print >>out, "share filename: %s" % config['filename']

    f = open(config['filename'], "rb")
    prefix = f.read(32)
    f.close()
    if prefix == storage.MutableShareFile.MAGIC:
        return dump_mutable_share(config, out, err)
    # otherwise assume it's immutable
    f = storage.ShareFile(config['filename'])
    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = storage.ReadBucketProxy(None)
    offsets = bp._parse_offsets(f.read_share_data(0, 0x24))
    seek = offsets['uri_extension']
    length = struct.unpack(">L", f.read_share_data(seek, 4))[0]
    seek += 4
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
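    # the UEB fields come back as a dict; the loops below print them in three
    # groups (sizes/share counts, codec parameters, then hashes), renaming
    # entries via display_keys where the on-disk name is unhelpful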
    keys1 = ("size", "num_segments", "segment_size",
             "needed_shares", "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash",
             "crypttext_hash", "crypttext_root_hash",
             "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}
    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print >>out
        print >>out, "LEFTOVER:"
        for k in sorted(leftover):
            print >>out, "%20s: %s" % (k, unpacked[k])

    sizes = {}
    sizes['data'] = bp._data_size
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print >>out
    print >>out, " Size of data within the share:"
    for k in sorted(sizes):
        print >>out, "%20s: %s" % (k, sizes[k])

    # display lease information too
    leases = list(f.iter_leases())
    if leases:
        for i,lease in enumerate(leases):
            (owner_num, renew_secret, cancel_secret, expiration_time) = lease
            when = format_expiration_time(expiration_time)
            print >>out, " Lease #%d: owner=%d, expire in %s" % (i, owner_num,
                                                                 when)
    else:
        print >>out, " No leases."

    print >>out
    return 0

def format_expiration_time(expiration_time):
    now = time.time()
    remains = expiration_time - now
    when = "%ds" % remains
    if remains > 24*3600:
        when += " (%d days)" % (remains / (24*3600))
    elif remains > 3600:
        when += " (%d hours)" % (remains / 3600)
    return when
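
# Example: for a lease expiring three days from now,
# format_expiration_time(time.time() + 3*24*3600) returns roughly
# "259200s (3 days)".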

def dump_mutable_share(config, out, err):
    from allmydata import storage
    from allmydata.util import base32, idlib
    m = storage.MutableShareFile(config['filename'])
    f = open(config['filename'], "rb")
    WE, nodeid = m._read_write_enabler_and_nodeid(f)
    num_extra_leases = m._read_num_extra_leases(f)
    data_length = m._read_data_length(f)
    extra_lease_offset = m._read_extra_lease_offset(f)
    container_size = extra_lease_offset - m.DATA_OFFSET
    leases = list(m._enumerate_leases(f))

    share_type = "unknown"
    f.seek(m.DATA_OFFSET)
    if f.read(1) == "\x00":
        # this slot contains an SDMF share
        share_type = "SDMF"
    f.close()
119 print >>out, "Mutable slot found:"
120 print >>out, " share_type: %s" % share_type
121 print >>out, " write_enabler: %s" % base32.b2a(WE)
122 print >>out, " WE for nodeid: %s" % idlib.nodeid_b2a(nodeid)
123 print >>out, " num_extra_leases: %d" % num_extra_leases
124 print >>out, " container_size: %d" % container_size
125 print >>out, " data_length: %d" % data_length
127 for (leasenum, (oid,et,rs,cs,anid)) in leases:
129 print >>out, " Lease #%d:" % leasenum
130 print >>out, " ownerid: %d" % oid
131 when = format_expiration_time(et)
132 print >>out, " expires in %s" % when
133 print >>out, " renew_secret: %s" % base32.b2a(rs)
134 print >>out, " cancel_secret: %s" % base32.b2a(cs)
135 print >>out, " secrets are for nodeid: %s" % idlib.nodeid_b2a(anid)
137 print >>out, "No leases."
140 if share_type == "SDMF":
141 dump_SDMF_share(m.DATA_OFFSET, data_length, config, out, err)

def dump_SDMF_share(offset, length, config, out, err):
    from allmydata import mutable
    from allmydata.util import base32

    f = open(config['filename'], "rb")
    f.seek(offset)
    data = f.read(min(length, 2000))
    f.close()
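
    # unpack_share() needs the whole SDMF header in hand; if the first 2000
    # bytes turn out to be too few, it raises NeedMoreDataError carrying the
    # number of bytes actually required, and the read is retried at that size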
    try:
        pieces = mutable.unpack_share(data)
    except mutable.NeedMoreDataError, e:
        # retry once with the larger size
        size = e.needed_bytes
        f = open(config['filename'], "rb")
        f.seek(offset)
        data = f.read(min(length, size))
        f.close()
        pieces = mutable.unpack_share(data)

    (seqnum, root_hash, IV, k, N, segsize, datalen,
     pubkey, signature, share_hash_chain, block_hash_tree,
     share_data, enc_privkey) = pieces

    print >>out, " SDMF contents:"
    print >>out, " seqnum: %d" % seqnum
    print >>out, " root_hash: %s" % base32.b2a(root_hash)
    print >>out, " IV: %s" % base32.b2a(IV)
    print >>out, " required_shares: %d" % k
    print >>out, " total_shares: %d" % N
    print >>out, " segsize: %d" % segsize
    print >>out, " datalen: %d" % datalen
    share_hash_ids = ",".join(sorted([str(hid)
                                      for hid in share_hash_chain.keys()]))
    print >>out, " share_hash_chain: %s" % share_hash_ids
    print >>out, " block_hash_tree: %d nodes" % len(block_hash_tree)

    print >>out
    return 0

class DumpCapOptions(usage.Options):
    optParameters = [
        ["nodeid", "n", None, "storage server nodeid (ascii), to construct WE and secrets."],
        ["client-secret", "c", None, "client's base secret (ascii), to construct secrets"],
        ["client-dir", "d", None, "client's base directory, from which a -c secret will be read"],
        ]
    def parseArgs(self, cap):
        self.cap = cap

def dump_cap(config, out=sys.stdout, err=sys.stderr):
    from allmydata import uri
    from allmydata.util import base32
    from base64 import b32decode
    import urlparse, urllib

    cap = config.cap
    nodeid = None
    if config['nodeid']:
        nodeid = b32decode(config['nodeid'].upper())
    secret = None
    if config['client-secret']:
        secret = base32.a2b(config['client-secret'])
    elif config['client-dir']:
        secretfile = os.path.join(config['client-dir'], "private", "secret")
        try:
            secret = base32.a2b(open(secretfile, "r").read().strip())
        except EnvironmentError:
            pass

    if cap.startswith("http"):
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(cap)
        assert path.startswith("/uri/")
        cap = urllib.unquote(path[len("/uri/"):])

    u = uri.from_string(cap)

    print >>out
    dump_uri_instance(u, nodeid, secret, out, err)

def _dump_secrets(storage_index, secret, nodeid, out):
    from allmydata.util import hashutil
    from allmydata.util import base32

    if secret:
        crs = hashutil.my_renewal_secret_hash(secret)
        print >>out, " client renewal secret:", base32.b2a(crs)
        frs = hashutil.file_renewal_secret_hash(crs, storage_index)
        print >>out, " file renewal secret:", base32.b2a(frs)
        if nodeid:
            renew = hashutil.bucket_renewal_secret_hash(frs, nodeid)
            print >>out, " lease renewal secret:", base32.b2a(renew)
        ccs = hashutil.my_cancel_secret_hash(secret)
        print >>out, " client cancel secret:", base32.b2a(ccs)
        fcs = hashutil.file_cancel_secret_hash(ccs, storage_index)
        print >>out, " file cancel secret:", base32.b2a(fcs)
        if nodeid:
            cancel = hashutil.bucket_cancel_secret_hash(fcs, nodeid)
            print >>out, " lease cancel secret:", base32.b2a(cancel)
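
# The chain printed above mirrors how lease secrets are derived: the client's
# base secret yields a per-client renewal/cancel secret, which is hashed with
# the file's storage index into a per-file secret, which is in turn hashed
# with the storage server's nodeid to produce the per-bucket lease secret.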

def dump_uri_instance(u, nodeid, secret, out, err, show_header=True):
    from allmydata import storage, uri
    from allmydata.util import base32, hashutil

    if isinstance(u, uri.CHKFileURI):
        if show_header:
            print >>out, "CHK File:"
        print >>out, " key:", base32.b2a(u.key)
        print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
        print >>out, " size:", u.size
        print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
        print >>out, " storage index:", storage.si_b2a(u.storage_index)
        _dump_secrets(u.storage_index, secret, nodeid, out)
    elif isinstance(u, uri.CHKFileVerifierURI):
        if show_header:
            print >>out, "CHK Verifier URI:"
        print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
        print >>out, " size:", u.size
        print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
        print >>out, " storage index:", storage.si_b2a(u.storage_index)

    elif isinstance(u, uri.LiteralFileURI):
        if show_header:
            print >>out, "Literal File URI:"
        print >>out, " data:", u.data

    elif isinstance(u, uri.WriteableSSKFileURI):
        if show_header:
            print >>out, "SSK Writeable URI:"
        print >>out, " writekey:", base32.b2a(u.writekey)
        print >>out, " readkey:", base32.b2a(u.readkey)
        print >>out, " storage index:", storage.si_b2a(u.storage_index)
        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
        print >>out
        if nodeid:
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print >>out, " write_enabler:", base32.b2a(we)
            print >>out
        _dump_secrets(u.storage_index, secret, nodeid, out)

    elif isinstance(u, uri.ReadonlySSKFileURI):
        if show_header:
            print >>out, "SSK Read-only URI:"
        print >>out, " readkey:", base32.b2a(u.readkey)
        print >>out, " storage index:", storage.si_b2a(u.storage_index)
        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
    elif isinstance(u, uri.SSKVerifierURI):
        if show_header:
            print >>out, "SSK Verifier URI:"
        print >>out, " storage index:", storage.si_b2a(u.storage_index)
        print >>out, " fingerprint:", base32.b2a(u.fingerprint)

    elif isinstance(u, uri.NewDirectoryURI):
        if show_header:
            print >>out, "Directory Writeable URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, err, False)
    elif isinstance(u, uri.ReadonlyNewDirectoryURI):
        if show_header:
            print >>out, "Directory Read-only URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, err, False)
    elif isinstance(u, uri.NewDirectoryURIVerifier):
        if show_header:
            print >>out, "Directory Verifier URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, err, False)
    else:
        print >>out, "unknown cap type"

class FindSharesOptions(usage.Options):
    def parseArgs(self, storage_index_s, *nodedirs):
        self.si_s = storage_index_s
        self.nodedirs = nodedirs

def find_shares(config, out=sys.stdout, err=sys.stderr):
    """Given a storage index and a list of node directories, emit a list of
    all matching shares to stdout, one per line. For example:

     find-shares.py 44kai1tui348689nrw8fjegc8c ~/testnet/node-*

    gives:

    /home/warner/testnet/node-1/storage/shares/44k/44kai1tui348689nrw8fjegc8c/5
    /home/warner/testnet/node-1/storage/shares/44k/44kai1tui348689nrw8fjegc8c/9
    /home/warner/testnet/node-2/storage/shares/44k/44kai1tui348689nrw8fjegc8c/2
    """
    from allmydata import storage

    sharedir = storage.storage_index_to_dir(storage.si_a2b(config.si_s))
    for d in config.nodedirs:
        d = os.path.join(os.path.expanduser(d), "storage/shares", sharedir)
        if os.path.exists(d):
            for shnum in os.listdir(d):
                print >>out, os.path.join(d, shnum)

    return 0
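
# The layout assumed above: each node keeps shares under
# NODEDIR/storage/shares/<abbreviated SI>/<full SI>/<share number>, where
# storage.storage_index_to_dir() computes the two-level directory portion
# from the storage index.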

class CatalogSharesOptions(usage.Options):
    """
    Run this as 'catalog-shares NODEDIRS..', and it will emit a line to stdout
    for each share it finds:

     CHK $SI $k/$N $filesize $UEB_hash $expiration $abspath_sharefile
     SDMF $SI $k/$N $filesize $seqnum/$roothash $expiration $abspath_sharefile
     UNKNOWN $abspath_sharefile

    It may be useful to build up a catalog of shares from many storage servers
    and then sort the results. If you see shares with the same SI but different
    parameters/filesize/UEB_hash, then something is wrong.
    """
    def parseArgs(self, *nodedirs):
        self.nodedirs = nodedirs

def describe_share(abs_sharefile, si_s, shnum_s, now, out, err):
    from allmydata import uri, storage, mutable
    from allmydata.util import base32

    f = open(abs_sharefile, "rb")
    prefix = f.read(32)

    if prefix == storage.MutableShareFile.MAGIC:
        # mutable share
        m = storage.MutableShareFile(abs_sharefile)
        WE, nodeid = m._read_write_enabler_and_nodeid(f)
        num_extra_leases = m._read_num_extra_leases(f)
        data_length = m._read_data_length(f)
        extra_lease_offset = m._read_extra_lease_offset(f)
        container_size = extra_lease_offset - m.DATA_OFFSET
        leases = list(m._enumerate_leases(f))
        expiration_time = min( [expiration_time
                                for (leasenum,
                                     (ownerid, expiration_time, rs, cs, nodeid))
                                in leases] )
        expiration = max(0, expiration_time - now)

        share_type = "unknown"
        f.seek(m.DATA_OFFSET)
        if f.read(1) == "\x00":
            # this slot contains an SDMF share
            share_type = "SDMF"

        if share_type == "SDMF":
            f.seek(m.DATA_OFFSET)
            data = f.read(min(data_length, 2000))
            try:
                pieces = mutable.unpack_share(data)
            except mutable.NeedMoreDataError, e:
                # retry once with the larger size
                size = e.needed_bytes
                f.seek(m.DATA_OFFSET)
                data = f.read(min(data_length, size))
                pieces = mutable.unpack_share(data)
            (seqnum, root_hash, IV, k, N, segsize, datalen,
             pubkey, signature, share_hash_chain, block_hash_tree,
             share_data, enc_privkey) = pieces

            print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
                  (si_s, k, N, datalen,
                   seqnum, base32.b2a(root_hash),
                   expiration, abs_sharefile)
        else:
            print >>out, "UNKNOWN mutable %s" % (abs_sharefile,)
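
    # immutable share files start with a 4-byte big-endian container version
    # number; version 1 is the only format this tool knows how to parse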
    elif struct.unpack(">L", prefix[:4]) == (1,):
        # immutable share

        sf = storage.ShareFile(abs_sharefile)
        # use a ReadBucketProxy to parse the bucket and find the uri extension
        bp = storage.ReadBucketProxy(None)
        offsets = bp._parse_offsets(sf.read_share_data(0, 0x24))
        seek = offsets['uri_extension']
        length = struct.unpack(">L", sf.read_share_data(seek, 4))[0]
        seek += 4
        UEB_data = sf.read_share_data(seek, length)
        expiration_time = min( [expiration_time
                                for (ownerid, rs, cs, expiration_time)
                                in sf.iter_leases()] )
        expiration = max(0, expiration_time - now)

        unpacked = uri.unpack_extension_readable(UEB_data)
        k = unpacked["needed_shares"]
        N = unpacked["total_shares"]
        filesize = unpacked["size"]
        ueb_hash = unpacked["UEB_hash"]

        print >>out, "CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
                                                   ueb_hash, expiration,
                                                   abs_sharefile)

    else:
        print >>out, "UNKNOWN really-unknown %s" % (abs_sharefile,)

    f.close()

def catalog_shares(config, out=sys.stdout, err=sys.stderr):
    now = time.time()
    for d in config.nodedirs:
        d = os.path.join(os.path.expanduser(d), "storage/shares")
        if os.path.exists(d):
            for abbrevdir in os.listdir(d):
                abbrevdir = os.path.join(d, abbrevdir)
                for si_s in os.listdir(abbrevdir):
                    si_dir = os.path.join(abbrevdir, si_s)
                    for shnum_s in os.listdir(si_dir):
                        abs_sharefile = os.path.join(si_dir, shnum_s)
                        abs_sharefile = os.path.abspath(abs_sharefile)
                        assert os.path.isfile(abs_sharefile)
                        describe_share(abs_sharefile, si_s, shnum_s, now,
                                       out, err)
    return 0
462 ["dump-share", None, DumpOptions,
463 "Unpack and display the contents of a share (uri_extension and leases)."],
464 ["dump-cap", None, DumpCapOptions, "Unpack a read-cap or write-cap"],
465 ["find-shares", None, FindSharesOptions, "Locate sharefiles in node dirs"],
466 ["catalog-shares", None, CatalogSharesOptions, "Describe shares in node dirs"],
470 "dump-share": dump_share,
471 "dump-cap": dump_cap,
472 "find-shares": find_shares,
473 "catalog-shares": catalog_shares,