class ICodecDecoder(Interface):
def set_params(data_size, required_shares, max_shares):
- """Set the params. They have to be exactly the same ones that were used for encoding. """
+ """Set the params. They have to be exactly the same ones that were
+ used for encoding."""
def get_needed_shares():
"""Return the number of shares needed to reconstruct the data.
set_encrypted_uploadable() and set_shareholders() must be called
before this can be invoked.
- This returns a Deferred that fires with a verify cap when the upload process is
- complete. The verifycap, plus the encryption key, is sufficient to construct the read
- cap.
+ This returns a Deferred that fires with a verify cap when the upload
+ process is complete. The verifycap, plus the encryption key, is
+ sufficient to construct the read cap.
"""
class IDecoder(Interface):
called. Whatever it returns will be returned to the invoker of
Downloader.download.
"""
- # The following methods are just because that target might be a repairer.DownUpConnector,
- # and just because the current CHKUpload object expects to find the storage index and
- # encoding parameters in its Uploadable.
+ # The following methods are just because that target might be a
+ # repairer.DownUpConnector, and just because the current CHKUpload object
+ # expects to find the storage index and encoding parameters in its
+ # Uploadable.
def set_storageindex(storageindex):
""" Set the storage index. """
def set_encodingparams(encodingparams):
plaintext hashes, but don't need the redundant encrypted data)."""
def get_plaintext_hashtree_leaves(first, last, num_segments):
- """OBSOLETE; Get the leaf nodes of a merkle hash tree over the plaintext
- segments, i.e. get the tagged hashes of the given segments. The
- segment size is expected to be generated by the IEncryptedUploadable
- before any plaintext is read or ciphertext produced, so that the
- segment hashes can be generated with only a single pass.
+ """OBSOLETE: Get the leaf nodes of a Merkle hash tree over the
+ plaintext segments, i.e. get the tagged hashes of the given segments.
+ The segment size is expected to be generated by the
+ IEncryptedUploadable before any plaintext is read or ciphertext
+ produced, so that the segment hashes can be generated with only a
+ single pass.
This returns a Deferred which fires with a sequence of hashes, using:
.file_size : the size of the file, in bytes
.servers_used : set of server peerids that were used during download
.server_problems : dict mapping server peerid to a problem string. Only
- servers that had problems (bad hashes, disconnects) are
- listed here.
+ servers that had problems (bad hashes, disconnects)
+ are listed here.
.servermap : dict mapping server peerid to a set of share numbers. Only
servers that had any shares are listed here.
.timings : dict of timing information, mapping name to seconds (float)
def get_data():
- """Return a dictionary that describes the state of the file/dir. LIT
- files always return an empty dictionary. Normal files and directories return a
- dictionary with the following keys (note that these use binary strings rather than
- base32-encoded ones) (also note that for mutable files, these counts are for the 'best'
- version):
+ """Return a dictionary that describes the state of the file/dir. LIT
+ files always return an empty dictionary. Normal files and directories
+ return a dictionary with the following keys (note that these use
+ binary strings rather than base32-encoded ones) (also note that for
+ mutable files, these counts are for the 'best' version):
count-shares-good: the number of distinct good shares that were found
count-shares-needed: 'k', the number of shares required for recovery
that was found to be corrupt. Each share
locator is a list of (serverid, storage_index,
sharenum).
- count-incompatible-shares: the number of shares which are of a share format unknown to
- this checker
- list-incompatible-shares: a list of 'share locators', one for each share that was found
- to be of an unknown format. Each share locator is a list of
- (serverid, storage_index, sharenum).
+ count-incompatible-shares: the number of shares which are of a share
+ format unknown to this checker
+ list-incompatible-shares: a list of 'share locators', one for each
+ share that was found to be of an unknown
+ format. Each share locator is a list of
+ (serverid, storage_index, sharenum).
servers-responding: list of (binary) storage server identifiers,
one for each server which responded to the share
- query (even if they said they didn't have shares,
- and even if they said they did have shares but then
- didn't send them when asked, or dropped the
- connection, or returned a Failure, and even if they
- said they did have shares and sent incorrect ones
- when asked)
+ query (even if they said they didn't have
+ shares, and even if they said they did have
+ shares but then didn't send them when asked, or
+ dropped the connection, or returned a Failure,
+ and even if they said they did have shares and
+ sent incorrect ones when asked)
sharemap: dict mapping share identifier to list of serverids
(binary strings). This indicates which servers are holding
which shares. For immutable files, the shareid is an
@param uri: a string or IURI-providing instance. This could be for a
LiteralFileNode, a CHK file node, a mutable file node, or
a directory node
- @return: an instance that provides IFilesystemNode (or more usefully one
- of its subclasses). File-specifying URIs will result in
+ @return: an instance that provides IFilesystemNode (or more usefully
+ one of its subclasses). File-specifying URIs will result in
IFileNode or IMutableFileNode -providing instances, like
FileNode, LiteralFileNode, or MutableFileNode.
Directory-specifying URIs will result in
storage servers.
"""
- def upload_from_file_to_uri(filename=str, convergence=ChoiceOf(None, StringConstraint(2**20))):
+ def upload_from_file_to_uri(filename=str,
+ convergence=ChoiceOf(None,
+ StringConstraint(2**20))):
"""Upload a file to the grid. This accepts a filename (which must be
absolute) that points to a file on the node's local disk. The node will
read the contents of this file, upload it to the grid, then return the
def get_stats():
"""
- returns a dictionary containing 'counters' and 'stats', each a dictionary
- with string counter/stat name keys, and numeric values. counters are
- monotonically increasing measures of work done, and stats are instantaneous
- measures (potentially time averaged internally)
+ Returns a dictionary containing 'counters' and 'stats', each a
+ dictionary with string counter/stat name keys, and numeric values.
+ Counters are monotonically increasing measures of work done, and
+ stats are instantaneous measures (potentially time averaged
+ internally).
"""
return DictOf(str, DictOf(str, ChoiceOf(float, int, long)))
class IValidatedThingProxy(Interface):
def start():
- """ Acquire a thing and validate it. Return a deferred which is eventually fired with
- self if the thing is valid or errbacked if it can't be acquired or validated. """
+ """Acquire a thing and validate it. Return a deferred which is
+ eventually fired with self if the thing is valid or errbacked if it
+ can't be acquired or validated."""
class InsufficientVersionError(Exception):
def __init__(self, needed, got):