uploader = self.parent.getServiceNamed("uploader")
u = upload.FileName(filename, convergence=convergence)
d = uploader.upload(u)
- d.addCallback(lambda results: results.uri)
+ d.addCallback(lambda results: results.get_uri())
return d
def remote_download_from_uri_to_file(self, uri, filename):
else:
up = upload.FileName(fn, convergence=None)
d1 = self.parent.upload(up)
- d1.addCallback(lambda results: results.uri)
+ d1.addCallback(lambda results: results.get_uri())
d1.addCallback(_record_uri, i)
d1.addCallback(_upload_one_file, i+1)
return d1
return defer.fail(NotWriteableError())
d = self._uploader.upload(uploadable)
d.addCallback(lambda results:
- self._create_and_validate_node(results.uri, None, name))
+ self._create_and_validate_node(results.get_uri(), None,
+ name))
d.addCallback(lambda node:
self.set_node(name, node, metadata, overwrite))
return d
self._uri_extension_data = uri_extension_data
self._uri_extension_hash = uri_extension_hash
self._verifycapstr = verifycapstr
- self.uri = None
def set_uri(self, uri):
- self.uri = uri
+ self._uri = uri
def get_file_size(self):
return self._file_size
+ def get_uri(self):
+ return self._uri
def get_ciphertext_fetched(self):
return self._ciphertext_fetched
def get_preexisting_shares(self):
"""I am returned by immutable upload() methods and contain the results of
the upload.
- I contain one public attribute:
- .uri : the CHK read-cap for the file
- """
+ Note that some of my methods return empty values (0 or an empty dict)
+ when called for non-distributed LIT files."""
- # some methods return empty values (0 or an empty dict) when called for
- # non-distributed LIT files
def get_file_size():
"""Return the file size, in bytes."""
+ def get_uri():
+ """Return the (string) URI of the object uploaded, a CHK readcap."""
def get_ciphertext_fetched():
"""Return the number of bytes fetched by the helper for this upload,
or 0 if the helper did not need to fetch any bytes (or if there was
packed = pack_children(children, None, deep_immutable=True)
uploadable = Data(packed, convergence)
d = self.uploader.upload(uploadable)
- d.addCallback(lambda results: self.create_from_cap(None, results.uri))
+ d.addCallback(lambda results:
+ self.create_from_cap(None, results.get_uri()))
d.addCallback(self._create_dirnode)
return d
DATA = "data" * 100
d = c0.upload(Data(DATA, convergence=""))
def _stash_immutable(ur):
- self.imm = c0.create_node_from_uri(ur.uri)
+ self.imm = c0.create_node_from_uri(ur.get_uri())
self.uri = self.imm.get_uri()
d.addCallback(_stash_immutable)
d.addCallback(lambda ign:
DATA = "data" * 100
d = c0.upload(Data(DATA, convergence=""))
def _stash_immutable(ur):
- self.imm = c0.create_node_from_uri(ur.uri)
+ self.imm = c0.create_node_from_uri(ur.get_uri())
d.addCallback(_stash_immutable)
d.addCallback(lambda ign:
c0.create_mutable_file(MutableData("contents")))
return self.c0.upload(Data(DATA, convergence=""))
d.addCallback(_start)
def _do_check(ur):
- n = self.c0.create_node_from_uri(ur.uri)
+ n = self.c0.create_node_from_uri(ur.get_uri())
return n.check(Monitor(), verify=True)
d.addCallback(_do_check)
def _check(cr):
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
def _stash_bad(ur):
- self.uri_1share = ur.uri
- self.delete_shares_numbered(ur.uri, range(1,10))
+ self.uri_1share = ur.get_uri()
+ self.delete_shares_numbered(ur.get_uri(), range(1,10))
d.addCallback(_stash_bad)
# the download is abandoned as soon as it's clear that we won't get
def _created_immutable(ur):
# write the generated shares and URI to a file, which can then be
# incorporated into this one next time.
- f.write('immutable_uri = "%s"\n' % ur.uri)
+ f.write('immutable_uri = "%s"\n' % ur.get_uri())
f.write('immutable_shares = {\n')
- si = uri.from_string(ur.uri).get_storage_index()
+ si = uri.from_string(ur.get_uri()).get_storage_index()
si_dir = storage_index_to_dir(si)
for (i,ss,ssdir) in self.iterate_servers():
sharedir = os.path.join(ssdir, "shares", si_dir)
u.max_segment_size = 70 # 5 segs
d = self.c0.upload(u)
def _uploaded(ur):
- self.uri = ur.uri
+ self.uri = ur.get_uri()
self.n = self.c0.create_node_from_uri(self.uri)
return download_to_data(self.n)
d.addCallback(_uploaded)
con2 = MemoryConsumer()
d = self.c0.upload(u)
def _uploaded(ur):
- n = self.c0.create_node_from_uri(ur.uri)
+ n = self.c0.create_node_from_uri(ur.get_uri())
d1 = n.read(con1, 70, 20)
d2 = n.read(con2, 140, 20)
return defer.gatherResults([d1,d2])
con2 = MemoryConsumer()
d = self.c0.upload(u)
def _uploaded(ur):
- n = self.c0.create_node_from_uri(ur.uri)
+ n = self.c0.create_node_from_uri(ur.get_uri())
n._cnode._maybe_create_download_node()
n._cnode._node._build_guessed_tables(u.max_segment_size)
d1 = n.read(con1, 70, 20)
con2 = MemoryConsumer()
d = self.c0.upload(u)
def _uploaded(ur):
- n = self.c0.create_node_from_uri(ur.uri)
+ n = self.c0.create_node_from_uri(ur.get_uri())
n._cnode._maybe_create_download_node()
n._cnode._node._build_guessed_tables(u.max_segment_size)
d = n.read(con1, 12000, 20)
def _corruptor(s, debug=False):
which = 48 # first byte of block0
return s[:which] + chr(ord(s[which])^0x01) + s[which+1:]
- self.corrupt_all_shares(ur.uri, _corruptor)
- n = self.c0.create_node_from_uri(ur.uri)
+ self.corrupt_all_shares(ur.get_uri(), _corruptor)
+ n = self.c0.create_node_from_uri(ur.get_uri())
n._cnode._maybe_create_download_node()
n._cnode._node._build_guessed_tables(u.max_segment_size)
con1 = MemoryConsumer()
def _corruptor(s, debug=False):
which = 48 # first byte of block0
return s[:which] + chr(ord(s[which])^0x01) + s[which+1:]
- self.corrupt_all_shares(ur.uri, _corruptor)
- n = self.c0.create_node_from_uri(ur.uri)
+ self.corrupt_all_shares(ur.get_uri(), _corruptor)
+ n = self.c0.create_node_from_uri(ur.get_uri())
n._cnode._maybe_create_download_node()
n._cnode._node._build_guessed_tables(u.max_segment_size)
con1 = MemoryConsumer()
u.max_segment_size = 60 # 6 segs
d = self.c0.upload(u)
def _uploaded(ur):
- n = self.c0.create_node_from_uri(ur.uri)
+ n = self.c0.create_node_from_uri(ur.get_uri())
n._cnode._maybe_create_download_node()
n._cnode._node._build_guessed_tables(u.max_segment_size)
con2 = MemoryConsumer()
d = self.c0.upload(u)
def _uploaded(ur):
- n = self.c0.create_node_from_uri(ur.uri)
+ n = self.c0.create_node_from_uri(ur.get_uri())
n._cnode._maybe_create_download_node()
n._cnode._node._build_guessed_tables(u.max_segment_size)
d1 = n.read(con1, 70, 20)
d = self.c0.upload(u)
def _uploaded(ur):
- imm_uri = ur.uri
+ imm_uri = ur.get_uri()
self.shares = self.copy_shares(imm_uri)
d = defer.succeed(None)
# 'victims' is a list of corruption tests to run. Each one flips
d = self.c0.upload(u)
def _uploaded(ur):
- imm_uri = ur.uri
+ imm_uri = ur.get_uri()
self.shares = self.copy_shares(imm_uri)
corrupt_me = [(48, "block data", "Last failure: None"),
u = upload.Data(plaintext, None)
d = self.c0.upload(u)
def _uploaded(ur):
- imm_uri = ur.uri
+ imm_uri = ur.get_uri()
n = self.c0.create_node_from_uri(imm_uri)
return download_to_data(n)
d.addCallback(_uploaded)
u = upload.Data(plaintext, None)
d = self.c0.upload(u)
def _uploaded(ur):
- imm_uri = ur.uri
+ imm_uri = ur.get_uri()
n = self.c0.create_node_from_uri(imm_uri)
return download_to_data(n)
d.addCallback(_uploaded)
u = upload.Data(plaintext, None)
d = self.c0.upload(u)
def _uploaded(ur):
- imm_uri = ur.uri
+ imm_uri = ur.get_uri()
def _do_corrupt(which, newvalue):
def _corruptor(s, debug=False):
return s[:which] + chr(newvalue) + s[which+1:]
u.encoding_param_happy = 1
u.encoding_param_n = 100
d = self.c0.upload(u)
- d.addCallback(lambda ur: self.c0.create_node_from_uri(ur.uri))
+ d.addCallback(lambda ur: self.c0.create_node_from_uri(ur.get_uri()))
# returns a FileNode
return d
return upload_data(u, DATA, convergence="some convergence string")
d.addCallback(_ready)
def _uploaded(results):
- the_uri = results.uri
+ the_uri = results.get_uri()
assert "CHK" in the_uri
d.addCallback(_uploaded)
return upload_data(u, DATA, convergence="test convergence string")
d.addCallback(_ready)
def _uploaded(results):
- the_uri = results.uri
+ the_uri = results.get_uri()
assert "CHK" in the_uri
d.addCallback(_uploaded)
return upload_data(u, DATA, convergence="some convergence string")
d.addCallback(_ready)
def _uploaded(results):
- the_uri = results.uri
+ the_uri = results.get_uri()
assert "CHK" in the_uri
d.addCallback(_uploaded)
data = upload.Data(immutable_plaintext, convergence="")
d = self.c0.upload(data)
def _uploaded_immutable(upload_res):
- self.uri = upload_res.uri
+ self.uri = upload_res.get_uri()
self.shares = self.find_uri_shares(self.uri)
d.addCallback(_uploaded_immutable)
return d
c1.DEFAULT_ENCODING_PARAMETERS['happy'] = 1
d = c1.upload(Data(TEST_DATA, convergence=""))
def _after_upload(ur):
- self.uri = ur.uri
- self.filenode = self.g.clients[0].create_node_from_uri(ur.uri)
+ self.uri = ur.get_uri()
+ self.filenode = self.g.clients[0].create_node_from_uri(ur.get_uri())
return self.uri
d.addCallback(_after_upload)
return d
data = Data(DATA, "")
d = c0.upload(data)
def _uploaded(res):
- n = c0.create_node_from_uri(res.uri)
+ n = c0.create_node_from_uri(res.get_uri())
return download_to_data(n)
d.addCallback(_uploaded)
def _check(res):
c0.DEFAULT_ENCODING_PARAMETERS['max_segment_size'] = 12
d = c0.upload(upload.Data(common.TEST_DATA, convergence=""))
def _stash_uri(ur):
- self.uri = ur.uri
- self.c0_filenode = c0.create_node_from_uri(ur.uri)
- self.c1_filenode = c1.create_node_from_uri(ur.uri)
+ self.uri = ur.get_uri()
+ self.c0_filenode = c0.create_node_from_uri(ur.get_uri())
+ self.c1_filenode = c1.create_node_from_uri(ur.get_uri())
d.addCallback(_stash_uri)
return d
c0.DEFAULT_ENCODING_PARAMETERS['n'] = 66
d = c0.upload(upload.Data(DATA, convergence=""))
def _then(ur):
- self.uri = ur.uri
+ self.uri = ur.get_uri()
self.delete_shares_numbered(self.uri, [0])
- self.c0_filenode = c0.create_node_from_uri(ur.uri)
+ self.c0_filenode = c0.create_node_from_uri(ur.get_uri())
self._stash_counts()
return self.c0_filenode.check_and_repair(Monitor())
d.addCallback(_then)
return d1
d.addCallback(_do_upload)
def _upload_done(results):
- theuri = results.uri
+ theuri = results.get_uri()
log.msg("upload finished: uri is %s" % (theuri,))
self.uri = theuri
assert isinstance(self.uri, str), self.uri
u = upload.Data(HELPER_DATA, convergence=convergence)
d = self.extra_node.upload(u)
def _uploaded(results):
- n = self.clients[1].create_node_from_uri(results.uri)
+ n = self.clients[1].create_node_from_uri(results.get_uri())
return download_to_data(n)
d.addCallback(_uploaded)
def _check(newdata):
u.debug_stash_RemoteEncryptedUploadable = True
d = self.extra_node.upload(u)
def _uploaded(results):
- n = self.clients[1].create_node_from_uri(results.uri)
+ n = self.clients[1].create_node_from_uri(results.get_uri())
return download_to_data(n)
d.addCallback(_uploaded)
def _check(newdata):
d.addCallback(lambda res: self.extra_node.upload(u2))
def _uploaded(results):
- cap = results.uri
+ cap = results.get_uri()
log.msg("Second upload complete", level=log.NOISY,
facility="tahoe.test.test_system")
MiB = 1024*1024
def extract_uri(results):
- return results.uri
+ return results.get_uri()
# Some of these took longer than 480 seconds on Zandr's arm box, but this may
# have been due to an earlier test ERROR'ing out due to timeout, which seems
self.data = data
d = client.upload(data)
def _store_uri(ur):
- self.uri = ur.uri
+ self.uri = ur.get_uri()
d.addCallback(_store_uri)
d.addCallback(lambda ign:
self.find_uri_shares(self.uri))
DATA = "data" * 100
u = upload.Data(DATA, convergence="")
d = c0.upload(u)
- d.addCallback(lambda ur: c0.create_node_from_uri(ur.uri))
+ d.addCallback(lambda ur: c0.create_node_from_uri(ur.get_uri()))
m = monitor.Monitor()
d.addCallback(lambda fn: fn.check(m))
def _check(cr):
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
def _stash_uri(ur, which):
- self.uris[which] = ur.uri
+ self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "good")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"1", convergence="")))
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
def _stash_uri(ur, which):
- self.uris[which] = ur.uri
+ self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "good")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"1", convergence="")))
DATA = "data" * 100
d = c0.upload(upload.Data(DATA+"1", convergence=""))
def _stash_uri(ur, which):
- self.uris[which] = ur.uri
+ self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "sick")
def _compute_fileurls(ignored):
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
def _stash_uri(ur, which):
- self.uris[which] = ur.uri
+ self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "one")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"1", convergence="")))
d.addCallback(_stash_root)
d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
def _stash_bad(ur):
- self.fileurls["1share"] = "uri/" + urllib.quote(ur.uri)
- self.delete_shares_numbered(ur.uri, range(1,10))
+ self.fileurls["1share"] = "uri/" + urllib.quote(ur.get_uri())
+ self.delete_shares_numbered(ur.get_uri(), range(1,10))
- u = uri.from_string(ur.uri)
+ u = uri.from_string(ur.get_uri())
u.key = testutil.flip_bit(u.key, 0)
baduri = u.to_string()
self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
d = c0.upload(upload.Data(DATA, convergence=""))
def _stash_uri_and_create_dir(ur):
- self.uri = ur.uri
+ self.uri = ur.get_uri()
self.url = "uri/"+self.uri
u = uri.from_string_filenode(self.uri)
self.si = u.get_storage_index()
# "PUT /uri", to create an unlinked file.
uploadable = FileHandle(req.content, client.convergence)
d = client.upload(uploadable)
- d.addCallback(lambda results: results.uri)
+ d.addCallback(lambda results: results.get_uri())
# that fires with the URI of the new file
return d
# usual upload-results page
def _done(upload_results, redir_to):
if "%(uri)s" in redir_to:
- redir_to = redir_to % {"uri": urllib.quote(upload_results.uri)
+ redir_to = redir_to % {"uri": urllib.quote(upload_results.get_uri())
}
return url.URL.fromString(redir_to)
d.addCallback(_done, when_done)
def data_uri(self, ctx, data):
d = self.upload_results()
- d.addCallback(lambda res: res.uri)
+ d.addCallback(lambda res: res.get_uri())
return d
def render_download_link(self, ctx, data):
d = self.upload_results()
- d.addCallback(lambda res: T.a(href="/uri/" + urllib.quote(res.uri))
- ["/uri/" + res.uri])
+ d.addCallback(lambda res:
+ T.a(href="/uri/" + urllib.quote(res.get_uri()))
+ ["/uri/" + res.get_uri()])
return d
def POSTUnlinkedSSK(req, client, version):