2 # do not import any allmydata modules at this level. Do that from inside
3 # individual functions instead.
4 import sys, struct, time, os
5 from twisted.python import usage
7 class DumpOptions(usage.Options):
    # Options for the "dump-share" subcommand: a single <filename>
    # positional argument naming the share file to dump (also settable
    # via -f/--filename).
    # NOTE(review): this listing is elided -- the optParameters opener
    # (original line 8) is not visible here, only its single entry.
9     ["filename", "f", None, "which file to dump"],
12     def parseArgs(self, filename=None):
        # Accept the share filename as an optional positional argument.
14         self['filename'] = filename
16     def postOptions(self):
        # Reject a missing (None) or empty filename after parsing.
17         if not self['filename']:
18             raise usage.UsageError("<filename> parameter is required")
20 def dump_share(config, out=sys.stdout, err=sys.stderr):
    # Print a human-readable dump of one share file to `out`.  Mutable
    # shares are detected by their magic prefix and handed off to
    # dump_mutable_share(); anything else is treated as an immutable
    # share: the URI extension is unpacked and displayed, followed by
    # section sizes and lease information.
    # NOTE(review): this listing is elided -- the read that sets
    # `prefix` (original lines 25-26), the loop headers over
    # keys1/keys2/keys3, the `sizes = {}` initializer, and the
    # if/else wrapping the lease display are not visible here.
21     from allmydata import uri, storage
23     # check the version, to see if we have a mutable or immutable share
24     f = open(config['filename'], "rb")
27     if prefix == storage.MutableShareFile.MAGIC:
28         return dump_mutable_share(config, out, err)
29     # otherwise assume it's immutable
30     f = storage.ShareFile(config['filename'])
31     # use a ReadBucketProxy to parse the bucket and find the uri extension
32     bp = storage.ReadBucketProxy(None)
33     offsets = bp._parse_offsets(f.read_share_data(0, 0x24))
34     seek = offsets['uri_extension']
    # the URI-extension block is prefixed with a big-endian 32-bit length
35     length = struct.unpack(">L", f.read_share_data(seek, 4))[0]
37     data = f.read_share_data(seek, length)
39     unpacked = uri.unpack_extension_readable(data)
    # three display groups: sizing/share counts, codec info, then hashes
40     keys1 = ("size", "num_segments", "segment_size",
41              "needed_shares", "total_shares")
42     keys2 = ("codec_name", "codec_params", "tail_codec_params")
43     keys3 = ("plaintext_hash", "plaintext_root_hash",
44              "crypttext_hash", "crypttext_root_hash",
    # "size" is renamed to "file_size" on output for clarity
46     display_keys = {"size": "file_size"}
49         dk = display_keys.get(k, k)
50         print >>out, "%19s: %s" % (dk, unpacked[k])
54         dk = display_keys.get(k, k)
55         print >>out, "%19s: %s" % (dk, unpacked[k])
59         dk = display_keys.get(k, k)
60         print >>out, "%19s: %s" % (dk, unpacked[k])
    # anything the three key groups did not cover gets dumped verbatim
62     leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
65         print >>out, "LEFTOVER:"
66         for k in sorted(leftover):
67             print >>out, "%s: %s" % (k, unpacked[k])
70     sizes['data'] = bp._data_size
    # validation section spans from the plaintext hash tree up to the
    # URI extension
71     sizes['validation'] = (offsets['uri_extension'] -
72                            offsets['plaintext_hash_tree'])
73     sizes['uri-extension'] = len(data)
75     print >>out, "Size of data within the share:"
76     for k in sorted(sizes):
77         print >>out, "%19s: %s" % (k, sizes[k])
79     # display lease information too
80     leases = list(f.iter_leases())
82         for i,lease in enumerate(leases):
83             (owner_num, renew_secret, cancel_secret, expiration_time) = lease
84             when = format_expiration_time(expiration_time)
85             print >>out, "Lease #%d: owner=%d, expire in %s" % (i, owner_num,
88         print >>out, "No leases."
93 def format_expiration_time(expiration_time):
    # Render a lease-expiration timestamp as a human-readable
    # "time remaining" string, e.g. "86400s (1 days)".
    # NOTE(review): elided listing -- the assignment of `now`
    # (presumably time.time(), original line 94), the day/hour
    # threshold conditionals (lines 97 and 99), and the return of
    # `when` are not visible in this view.
95     remains = expiration_time - now
96     when = "%ds" % remains
98         when += " (%d days)" % (remains / (24*3600))
100         when += " (%d hours)" % (remains / 3600)
104 def dump_mutable_share(config, out, err):
    # Dump the container-level metadata of a mutable share slot: write
    # enabler, lease counts, sizes, and per-lease secrets.  If the slot
    # holds an SDMF share, dump_SDMF_share() is called to display the
    # share contents as well.
    # NOTE(review): elided listing -- the assignment setting
    # share_type = "SDMF" inside the version-byte check (around
    # original line 120) and the if/else wrapping the lease loop are
    # not visible here.
105     from allmydata import storage
106     from allmydata.util import idlib
107     m = storage.MutableShareFile(config['filename'])
108     f = open(config['filename'], "rb")
109     WE, nodeid = m._read_write_enabler_and_nodeid(f)
110     num_extra_leases = m._read_num_extra_leases(f)
111     data_length = m._read_data_length(f)
112     extra_lease_offset = m._read_extra_lease_offset(f)
113     container_size = extra_lease_offset - m.DATA_OFFSET
114     leases = list(m._enumerate_leases(f))
    # sniff the first data byte to guess the share format
116     share_type = "unknown"
117     f.seek(m.DATA_OFFSET)
118     if f.read(1) == "\x00":
119         # this slot contains an SMDF share
124     print >>out, "Mutable slot found:"
125     print >>out, " share_type: %s" % share_type
126     print >>out, " write_enabler: %s" % idlib.b2a(WE)
127     print >>out, " WE for nodeid: %s" % idlib.nodeid_b2a(nodeid)
128     print >>out, " num_extra_leases: %d" % num_extra_leases
129     print >>out, " container_size: %d" % container_size
130     print >>out, " data_length: %d" % data_length
    # each lease: (ownerid, expiration, renew secret, cancel secret,
    # nodeid the secrets were derived for)
132     for (leasenum, (oid,et,rs,cs,anid)) in leases:
134         print >>out, " Lease #%d:" % leasenum
135         print >>out, "  ownerid: %d" % oid
136         when = format_expiration_time(et)
137         print >>out, "  expires in %s" % when
138         print >>out, "  renew_secret: %s" % idlib.b2a(rs)
139         print >>out, "  cancel_secret: %s" % idlib.b2a(cs)
140         print >>out, "  secrets are for nodeid: %s" % idlib.nodeid_b2a(anid)
142         print >>out, "No leases."
    # descend into the share contents only for the format we understand
145     if share_type == "SDMF":
146         dump_SDMF_share(m.DATA_OFFSET, data_length, config, out, err)
150 def dump_SDMF_share(offset, length, config, out, err):
    # Unpack and display the fields of an SDMF share found at `offset`
    # within the share file: sequence number, root hash, IV, encoding
    # parameters, and hash-tree summaries.
    # NOTE(review): elided listing -- the f.seek(offset) calls before
    # each read (around original lines 155 and 165) and the `try:`
    # opener for the unpack attempt are not visible here.
151     from allmydata import mutable
152     from allmydata.util import idlib
154     f = open(config['filename'], "rb")
    # first attempt: read a modest prefix and hope it is enough
156     data = f.read(min(length, 2000))
160         pieces = mutable.unpack_share(data)
161     except mutable.NeedMoreDataError, e:
162         # retry once with the larger size
163         size = e.needed_bytes
164         f = open(config['filename'], "rb")
166         data = f.read(min(length, size))
168         pieces = mutable.unpack_share(data)
170     (seqnum, root_hash, IV, k, N, segsize, datalen,
171      pubkey, signature, share_hash_chain, block_hash_tree,
172      share_data, enc_privkey) = pieces
174     print >>out, " SDMF contents:"
175     print >>out, "  seqnum: %d" % seqnum
176     print >>out, "  root_hash: %s" % idlib.b2a(root_hash)
177     print >>out, "  IV: %s" % idlib.b2a(IV)
178     print >>out, "  required_shares: %d" % k
179     print >>out, "  total_shares: %d" % N
180     print >>out, "  segsize: %d" % segsize
181     print >>out, "  datalen: %d" % datalen
    # summarize the hash structures instead of dumping them in full
182     share_hash_ids = ",".join(sorted([str(hid)
183                                       for hid in share_hash_chain.keys()]))
184     print >>out, "  share_hash_chain: %s" % share_hash_ids
185     print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)
191 class DumpCapOptions(usage.Options):
    # Options for "dump-cap": optionally supply a storage-server nodeid
    # and a client base secret (directly, or read from a client
    # directory) so that derived write-enablers and lease secrets can
    # be displayed alongside the cap's own fields.
    # NOTE(review): elided listing -- the optParameters opener and the
    # parseArgs body that stores `cap` (around original line 198) are
    # not visible here.
193         ["nodeid", "n", None, "storage server nodeid (ascii), to construct WE and secrets."],
194         ["client-secret", "c", None, "client's base secret (ascii), to construct secrets"],
195         ["client-dir", "d", None, "client's base directory, from which a -c secret will be read"],
197     def parseArgs(self, cap):
200 def dump_cap(config, out=sys.stdout, err=sys.stderr):
    # Parse a Tahoe URI string (read-cap or write-cap) and print its
    # components via dump_uri_instance(), optionally with derived
    # secrets when a nodeid and/or client secret were supplied.
    # NOTE(review): elided listing -- the assignment of `cap` from
    # config, the None-initialization of nodeid/secret, the
    # `if config['nodeid']:` guard, and the try/except's `pass` body
    # are not visible in this view.
201     from allmydata import uri
202     from allmydata.util.idlib import a2b
203     from base64 import b32decode
206     u = uri.from_string(cap)
    # nodeid option is base32-encoded ascii; b32decode wants uppercase
209         nodeid = b32decode(config['nodeid'].upper())
211     if config['client-secret']:
212         secret = a2b(config['client-secret'])
213     elif config['client-dir']:
        # fall back to reading the client's private/secret file
214         secretfile = os.path.join(config['client-dir'], "private", "secret")
216             secret = a2b(open(secretfile, "r").read().strip())
217         except EnvironmentError:
221     dump_uri_instance(u, nodeid, secret, out, err)
223 def _dump_secrets(storage_index, secret, nodeid, out):
    # Derive and print the chain of lease secrets for this storage
    # index: client secret -> per-file secret -> per-bucket (per-node)
    # secret, for both the renewal and cancel families.
    # NOTE(review): elided listing -- guard conditionals (presumably
    # `if secret:` / `if nodeid:`, around original lines 226-227, 232
    # and 239) are not visible in this view.
224     from allmydata.util import hashutil
225     from allmydata.util.idlib import b2a
228         crs = hashutil.my_renewal_secret_hash(secret)
229         print >>out, " client renewal secret:", b2a(crs)
230         frs = hashutil.file_renewal_secret_hash(crs, storage_index)
231         print >>out, " file renewal secret:", b2a(frs)
    # bucket-level secrets additionally bind to the server's nodeid
233             renew = hashutil.bucket_renewal_secret_hash(frs, nodeid)
234             print >>out, " lease renewal secret:", b2a(renew)
235         ccs = hashutil.my_cancel_secret_hash(secret)
236         print >>out, " client cancel secret:", b2a(ccs)
237         fcs = hashutil.file_cancel_secret_hash(ccs, storage_index)
238         print >>out, " file cancel secret:", b2a(fcs)
240             cancel = hashutil.bucket_cancel_secret_hash(fcs, nodeid)
241             print >>out, " lease cancel secret:", b2a(cancel)
243 def dump_uri_instance(u, nodeid, secret, out, err, show_header=True):
    # Pretty-print the fields of a parsed URI object, dispatching on its
    # concrete type (CHK file/verifier, literal, SSK read/write/verifier,
    # and the directory wrappers, which recurse into their filenode URI).
    # Derived secrets are shown where a nodeid/secret are available.
    # NOTE(review): elided listing -- the `if show_header:` guards
    # before each header print, the `if nodeid:` guard around the
    # write-enabler computation, and the final `else:` before the
    # "unknown cap type" print are not visible in this view.
244     from allmydata import uri
245     from allmydata.util.idlib import b2a
246     from allmydata.util import hashutil
248     if isinstance(u, uri.CHKFileURI):
250             print >>out, "CHK File:"
251         print >>out, " key:", b2a(u.key)
252         print >>out, " UEB hash:", b2a(u.uri_extension_hash)
253         print >>out, " size:", u.size
254         print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
255         print >>out, " storage index:", b2a(u.storage_index)
256         _dump_secrets(u.storage_index, secret, nodeid, out)
257     elif isinstance(u, uri.CHKFileVerifierURI):
259             print >>out, "CHK Verifier URI:"
260         print >>out, " UEB hash:", b2a(u.uri_extension_hash)
261         print >>out, " size:", u.size
262         print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
263         print >>out, " storage index:", b2a(u.storage_index)
265     elif isinstance(u, uri.LiteralFileURI):
267             print >>out, "Literal File URI:"
268         print >>out, " data:", u.data
270     elif isinstance(u, uri.WriteableSSKFileURI):
272             print >>out, "SSK Writeable URI:"
273         print >>out, " writekey:", b2a(u.writekey)
274         print >>out, " readkey:", b2a(u.readkey)
275         print >>out, " storage index:", b2a(u.storage_index)
276         print >>out, " fingerprint:", b2a(u.fingerprint)
            # the write-enabler is derived from the writekey + server nodeid
279             we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
280             print >>out, " write_enabler:", b2a(we)
282         _dump_secrets(u.storage_index, secret, nodeid, out)
284     elif isinstance(u, uri.ReadonlySSKFileURI):
286             print >>out, "SSK Read-only URI:"
287         print >>out, " readkey:", b2a(u.readkey)
288         print >>out, " storage index:", b2a(u.storage_index)
289         print >>out, " fingerprint:", b2a(u.fingerprint)
290     elif isinstance(u, uri.SSKVerifierURI):
292             print >>out, "SSK Verifier URI:"
293         print >>out, " storage index:", b2a(u.storage_index)
294         print >>out, " fingerprint:", b2a(u.fingerprint)
    # directory caps wrap a filenode URI; recurse without re-printing
    # the header for the inner node
296     elif isinstance(u, uri.NewDirectoryURI):
298             print >>out, "Directory Writeable URI:"
299         dump_uri_instance(u._filenode_uri, nodeid, secret, out, err, False)
300     elif isinstance(u, uri.ReadonlyNewDirectoryURI):
302             print >>out, "Directory Read-only URI:"
303         dump_uri_instance(u._filenode_uri, nodeid, secret, out, err, False)
304     elif isinstance(u, uri.NewDirectoryURIVerifier):
306             print >>out, "Directory Verifier URI:"
307         dump_uri_instance(u._filenode_uri, nodeid, secret, out, err, False)
309         print >>out, "unknown cap type"
    # NOTE(review): elided listing -- the openers of these two
    # module-level tables (the usage.Options subCommands list and the
    # subcommand-name -> handler-function dispatch dict) fall in the
    # missing original lines (around 312 and 318) and are not visible.
313     ["dump-share", None, DumpOptions,
314      "Unpack and display the contents of a share (uri_extension and leases)."],
315     ["dump-cap", None, DumpCapOptions, "Unpack a read-cap or write-cap"]
    # dispatch table mapping subcommand names to their handlers
319     "dump-share": dump_share,
320     "dump-cap": dump_cap,