return self._read(0, 0x44)
def _parse_offsets(self, data):
- precondition(len(data) >= 0x4)
+ precondition(len(data) >= 0x4, len(data))
self._offsets = {}
(version,) = struct.unpack(">L", data[0:4])
if version != 1 and version != 2:
self._status.set_progress(1, progress)
return cryptdata
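
For context on the precondition change above: precondition() from allmydata.util.assertutil folds any extra arguments into the AssertionError it raises, so passing len(data) makes a failure on a truncated read self-describing. A minimal sketch (the short buffer is hypothetical, not from this patch):

    from allmydata.util.assertutil import precondition

    data = "\x00\x01"                     # hypothetical truncated read
    try:
        precondition(len(data) >= 0x4, len(data))
    except AssertionError as e:
        # the failure message now carries the actual length (2), not just
        # the bare expression
        failure = str(e)
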
-
def get_plaintext_hashtree_leaves(self, first, last, num_segments):
+ """OBSOLETE; Get the leaf nodes of a merkle hash tree over the
+ plaintext segments, i.e. get the tagged hashes of the given segments.
+ The segment size is expected to be generated by the
+ IEncryptedUploadable before any plaintext is read or ciphertext
+ produced, so that the segment hashes can be generated with only a
+ single pass.
+
+ This returns a Deferred that fires with a sequence of hashes, using:
+
+ tuple(segment_hashes[first:last])
+
+ 'num_segments' is used to assert that the number of segments that the
+ IEncryptedUploadable handled matches the number of segments that the
+ encoder was expecting.
+
+ This method must not be called until the final byte has been read
+ from read_encrypted(). Once this method is called, read_encrypted()
+ can never be called again.
+ """
# this is currently unused, but will live again when we fix #453
if len(self._plaintext_segment_hashes) < num_segments:
# close out the last one
return defer.succeed(tuple(self._plaintext_segment_hashes[first:last]))
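
A hedged usage sketch of the interface documented above (the uploadable and num_segments names are illustrative, not taken from this patch):

    # Illustrative only: consume the Deferred after the final
    # read_encrypted() call has completed.
    def _got_leaves(leaves):
        # 'leaves' is tuple(segment_hashes[first:last]);
        # here first=0, last=num_segments
        assert len(leaves) == num_segments
        return leaves

    d = uploadable.get_plaintext_hashtree_leaves(0, num_segments, num_segments)
    d.addCallback(_got_leaves)
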
def get_plaintext_hash(self):
+ """OBSOLETE; Get the hash of the whole plaintext.
+
+ This returns a Deferred that fires with a tagged SHA-256 hash of the
+ whole plaintext, obtained from hashutil.plaintext_hash(data).
+ """
+ # this is currently unused, but will live again when we fix #453
h = self._plaintext_hasher.digest()
return defer.succeed(h)
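
Likewise, a sketch of how the hash described above could be cross-checked against hashutil.plaintext_hash(), which the docstring names (the plaintext and uploadable names are hypothetical):

    from allmydata.util import hashutil

    def _check(h):
        # h should equal the tagged SHA-256 hash of the whole plaintext
        assert h == hashutil.plaintext_hash(plaintext), "plaintext hash mismatch"

    d = uploadable.get_plaintext_hash()
    d.addCallback(_check)
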
is empty, the metadata will be an empty dictionary.
"""
- def set_uri(name, writecap, readcap=None, metadata=None, overwrite=True):
+ def set_uri(name, writecap, readcap, metadata=None, overwrite=True):
"""I add a child (by writecap+readcap) at the specific name. I return
a Deferred that fires when the operation finishes. If overwrite= is
True, I will replace any existing child of the same name, otherwise
"""
return self._version[0] # verinfo[0] == the sequence number
+ def get_servermap(self):
+ return self._servermap
- # TODO: Terminology?
def get_writekey(self):
"""
I return a writekey or None if I don't have a writekey.
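
A one-line sketch of the check this enables (the node name is illustrative):

    is_readonly = (node.get_writekey() is None)
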
return (version, seqnum, root_hash, IV, k, N, segsize, datalen, o)
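
For readers of the header format, a sketch of consuming the tuple returned above (header_tuple stands in for that return value; the name is illustrative):

    # Illustrative only: split the header tuple into its named fields.
    (version, seqnum, root_hash, IV,
     k, N, segsize, datalen, offsets) = header_tuple
    # k/N are the erasure-coding parameters; the remaining fields mirror
    # the names used in the return statement above.
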
def unpack_share(data):
- assert len(data) >= HEADER_LENGTH
+ assert len(data) >= HEADER_LENGTH, len(data)
o = {}
(version,
seqnum,
-import os.path
+import os, re
+
from allmydata.util import base32
+
+# Share numbers match this regex:
+NUM_RE = re.compile("^[0-9]+$")
+
+# Prefix directories (the first two characters of a base32-encoded storage
+# index) match this regex:
+PREFIX = re.compile("^[%s]{2}$" % (base32.z_base_32_alphabet,))
+
+
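
A sketch of how these patterns might be used when scanning the on-disk share layout (the directory arguments and helper functions are assumptions, not part of this patch):

    import os

    def list_prefixes(sharedir):
        # hypothetical helper: the two-character z-base-32 prefix directories
        return [fn for fn in os.listdir(sharedir) if PREFIX.match(fn)]

    def list_share_numbers(bucketdir):
        # hypothetical helper: numeric share filenames inside one bucket
        return [int(fn) for fn in os.listdir(bucketdir) if NUM_RE.match(fn)]
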
class DataTooLargeError(Exception):
pass
-class UnknownMutableContainerVersionError(Exception):
+
+class UnknownContainerVersionError(Exception):
+ pass
+
+class UnknownMutableContainerVersionError(UnknownContainerVersionError):
pass
-class UnknownImmutableContainerVersionError(Exception):
+
+class UnknownImmutableContainerVersionError(UnknownContainerVersionError):
pass
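
With the new shared base class, a single except clause can cover both variants; a minimal sketch (read_container is a hypothetical reader, not from this patch):

    try:
        container = read_container(path)      # hypothetical
    except UnknownContainerVersionError:
        # catches both UnknownMutableContainerVersionError and
        # UnknownImmutableContainerVersionError
        container = None
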
def data_nickname(self, ctx, storage):
return self.nickname
def data_nodeid(self, ctx, storage):
- return idlib.nodeid_b2a(self.storage.get_nodeid())
+ return idlib.nodeid_b2a(self.storage.get_serverid())
def render_storage_running(self, ctx, storage):
if storage: