Number = IntegerConstraint(8) # 2**(8*8) == 16EiB ~= 18e18 ~= 18 exabytes
Offset = Number
ReadSize = int # the 'int' constraint is 2**31 == 2GiB -- large files are processed in not-so-large increments
-WriteEnablerSecret = Hash # used to protect mutable bucket modifications
-LeaseRenewSecret = Hash # used to protect bucket lease renewal requests
-LeaseCancelSecret = Hash # used to protect bucket lease cancellation requests
+WriteEnablerSecret = Hash # used to protect mutable share modifications
+LeaseRenewSecret = Hash # used to protect lease renewal requests
+LeaseCancelSecret = Hash # was used to protect lease cancellation requests
+
class RIBucketWriter(RemoteInterface):
""" Objects of this kind live on the server side. """
This secret is generated by the client and
stored for later comparison by the server. Each
server is given a different secret.
- @param cancel_secret: Like renew_secret, but protects bucket decref.
+ @param cancel_secret: This no longer allows lease cancellation, but
+ must still be a unique value identifying the
+ lease. XXX stop relying on it to be unique.
@param canary: If the canary is lost before close(), the bucket is
deleted.
@return: tuple of (alreadygot, allocated), where alreadygot is what we
For mutable shares, if the given renew_secret does not match an
existing lease, IndexError will be raised with a note listing the
server-nodeids on the existing leases, so leases on migrated shares
- can be renewed or cancelled. For immutable shares, IndexError
- (without the note) will be raised.
+ can be renewed. For immutable shares, IndexError (without the note)
+ will be raised.
"""
return Any()
This secret is generated by the client and
stored for later comparison by the server. Each
server is given a different secret.
- @param cancel_secret: Like renew_secret, but protects bucket decref.
+ @param cancel_secret: This no longer allows lease cancellation, but
+ must still be a unique value identifying the
+ lease. XXX stop relying on it to be unique.
The 'secrets' argument is a tuple of (write_enabler, renew_secret,
cancel_secret). The first is required to perform any write. The
"""
The interface for a writer around a mutable slot on a remote server.
"""
- def set_checkstring(checkstring, *args):
+ def set_checkstring(seqnum_or_checkstring, root_hash=None, salt=None):
"""
Set the checkstring that I will pass to the remote server when
writing.
writer-visible data using this writekey.
"""
- # TODO: Can this be overwrite instead of replace?
- def replace(new_contents):
+ def overwrite(new_contents):
"""Replace the contents of the mutable file, provided that no other
node has published (or is attempting to publish, concurrently) a
newer version of the file than this one.
is a file, or if must_be_file is True and the child is a directory,
I raise ChildOfWrongTypeError."""
- def create_subdirectory(name, initial_children={}, overwrite=True, metadata=None):
+ def create_subdirectory(name, initial_children={}, overwrite=True,
+ mutable=True, mutable_version=None, metadata=None):
"""I create and attach a directory at the given name. The new
directory can be empty, or it can be populated with children
according to 'initial_children', which takes a dictionary in the same
"""Specify the number of bytes that will be encoded. This must be
    performed before get_serialized_params() can be called.
"""
- def set_params(params):
- """Override the default encoding parameters. 'params' is a tuple of
- (k,d,n), where 'k' is the number of required shares, 'd' is the
- servers_of_happiness, and 'n' is the total number of shares that will
- be created.
-
- Encoding parameters can be set in three ways. 1: The Encoder class
- provides defaults (3/7/10). 2: the Encoder can be constructed with
- an 'options' dictionary, in which the
- needed_and_happy_and_total_shares' key can be a (k,d,n) tuple. 3:
- set_params((k,d,n)) can be called.
-
- If you intend to use set_params(), you must call it before
- get_share_size or get_param are called.
- """
def set_encrypted_uploadable(u):
"""Provide a source of encrypted upload data. 'u' must implement
resuming an interrupted upload (where we need to compute the
plaintext hashes, but don't need the redundant encrypted data)."""
- def get_plaintext_hashtree_leaves(first, last, num_segments):
- """OBSOLETE; Get the leaf nodes of a merkle hash tree over the
- plaintext segments, i.e. get the tagged hashes of the given segments.
- The segment size is expected to be generated by the
- IEncryptedUploadable before any plaintext is read or ciphertext
- produced, so that the segment hashes can be generated with only a
- single pass.
-
- This returns a Deferred which fires with a sequence of hashes, using:
-
- tuple(segment_hashes[first:last])
-
- 'num_segments' is used to assert that the number of segments that the
- IEncryptedUploadable handled matches the number of segments that the
- encoder was expecting.
-
- This method must not be called until the final byte has been read
- from read_encrypted(). Once this method is called, read_encrypted()
- can never be called again.
- """
-
- def get_plaintext_hash():
- """OBSOLETE; Get the hash of the whole plaintext.
-
- This returns a Deferred which fires with a tagged SHA-256 hash of the
- whole plaintext, obtained from hashutil.plaintext_hash(data).
- """
-
def close():
"""Just like IUploadable.close()."""
returns a Deferred that fires with an IUploadResults instance, from
which the URI of the file can be obtained as results.uri ."""
- def upload_ssk(write_capability, new_version, uploadable):
- """TODO: how should this work?"""
class ICheckable(Interface):
def check(monitor, verify=False, add_lease=False):
not. Unrecoverable files are obviously unhealthy. Non-distributed LIT
files always return True."""
- def needs_rebalancing():
- """Return a boolean, True if the file/dirs reliability could be
- improved by moving shares to new servers. Non-distributed LIT files
- always return False."""
-
# the following methods all return None for non-distributed LIT files
+ def get_happiness():
+ """Return the happiness count of the file."""
+
def get_encoding_needed():
- """Return 'k', the number of shares required for recovery"""
+ """Return 'k', the number of shares required for recovery."""
def get_encoding_expected():
- """Return 'N', the number of total shares generated"""
+ """Return 'N', the number of total shares generated."""
def get_share_counter_good():
"""Return the number of distinct good shares that were found. For
Tahoe process will typically have a single NodeMaker, but unit tests may
create simplified/mocked forms for testing purposes.
"""
- def create_from_cap(writecap, readcap=None, **kwargs):
+
+ def create_from_cap(writecap, readcap=None, deep_immutable=False, name=u"<unknown name>"):
"""I create an IFilesystemNode from the given writecap/readcap. I can
only provide nodes for existing file/directory objects: use my other
methods to create new objects. I return synchronously."""
class BadWriteEnablerError(Exception):
pass
-class RIControlClient(RemoteInterface):
+class RIControlClient(RemoteInterface):
def wait_for_client_connections(num_clients=int):
"""Do not return until we have connections to at least NUM_CLIENTS
storage servers.
"""
- def upload_from_file_to_uri(filename=str,
- convergence=ChoiceOf(None,
- StringConstraint(2**20))):
- """Upload a file to the grid. This accepts a filename (which must be
- absolute) that points to a file on the node's local disk. The node will
- read the contents of this file, upload it to the grid, then return the
- URI at which it was uploaded. If convergence is None then a random
- encryption key will be used, else the plaintext will be hashed, then
- that hash will be mixed together with the "convergence" string to form
- the encryption key.
- """
- return URI
+ # debug stuff
- def download_from_uri_to_file(uri=URI, filename=str):
- """Download a file from the grid, placing it on the node's local disk
- at the given filename (which must be absolute[?]). Returns the
- absolute filename where the file was written."""
+ def upload_random_data_from_file(size=int, convergence=str):
return str
- # debug stuff
+ def download_to_tempfile_and_delete(uri=str):
+ return None
def get_memory_usage():
"""Return a dict describes the amount of memory currently in use. The