git.rkrishnan.org Git - tahoe-lafs/tahoe-lafs.git/commitdiff
switch UploadResults to use getters, hide internal data, for all but .uri
author: Brian Warner <warner@lothar.com>
Tue, 22 May 2012 04:14:28 +0000 (21:14 -0700)
committer: Brian Warner <warner@lothar.com>
Tue, 22 May 2012 04:14:28 +0000 (21:14 -0700)
This hides attributes with e.g. _sharemap, and creates getters like
get_sharemap() to access them, for every field except .uri . This will
make it easier to modify the internal representation of .sharemap
without requiring callers to adjust quite yet.

".uri" has so many users that it seemed better to update it in a
subsequent patch.

src/allmydata/immutable/filenode.py
src/allmydata/immutable/offloaded.py
src/allmydata/immutable/upload.py
src/allmydata/interfaces.py
src/allmydata/test/test_system.py
src/allmydata/test/test_upload.py
src/allmydata/web/status.py

index c7fa82f6a4193bff7bdb702b3807f49ec7953fc5..e159be033329637ff30a557b63806395541df9d2 100644 (file)
@@ -117,9 +117,9 @@ class CiphertextFileNode:
 
                     sm = prr.data['sharemap']
                     assert isinstance(sm, DictOfSets), sm
-                    sm.update(ur.sharemap)
+                    sm.update(ur.get_sharemap())
                     servers_responding = set(prr.data['servers-responding'])
-                    for shnum, serverids in ur.sharemap.items():
+                    for shnum, serverids in ur.get_sharemap().items():
                         servers_responding.update(serverids)
                     servers_responding = sorted(servers_responding)
                     prr.data['servers-responding'] = servers_responding
index 8faf516abd6513dbcf8c3e24314c795f1215ab61..428cb1ce20c8f826cb32c1f3c76e4efd679248f7 100644 (file)
@@ -201,26 +201,27 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader):
         return self._finished_observers.when_fired()
 
     def _finished(self, ur):
-        precondition(isinstance(ur.verifycapstr, str), ur.verifycapstr)
         assert interfaces.IUploadResults.providedBy(ur), ur
-        v = uri.from_string(ur.verifycapstr)
+        vcapstr = ur.get_verifycapstr()
+        precondition(isinstance(vcapstr, str), vcapstr)
+        v = uri.from_string(vcapstr)
         f_times = self._fetcher.get_times()
 
         hur = upload.HelperUploadResults()
         hur.timings = {"cumulative_fetch": f_times["cumulative_fetch"],
                        "total_fetch": f_times["total"],
                        }
-        for k in ur.timings:
-            hur.timings[k] = ur.timings[k]
+        for key,val in ur.get_timings().items():
+            hur.timings[key] = val
         hur.uri_extension_hash = v.uri_extension_hash
         hur.ciphertext_fetched = self._fetcher.get_ciphertext_fetched()
-        hur.preexisting_shares = ur.preexisting_shares
-        hur.sharemap = ur.sharemap
-        hur.servermap = ur.servermap
-        hur.pushed_shares = ur.pushed_shares
-        hur.file_size = ur.file_size
-        hur.uri_extension_data = ur.uri_extension_data
-        hur.verifycapstr = ur.verifycapstr
+        hur.preexisting_shares = ur.get_preexisting_shares()
+        hur.sharemap = ur.get_sharemap()
+        hur.servermap = ur.get_servermap()
+        hur.pushed_shares = ur.get_pushed_shares()
+        hur.file_size = ur.get_file_size()
+        hur.uri_extension_data = ur.get_uri_extension_data()
+        hur.verifycapstr = vcapstr
 
         self._reader.close()
         os.unlink(self._encoding_file)
index ccab2aabac673f92707312aa6b5eb39b366f8436..bfd613454daa305c8172b6273793c8db8d9fb696 100644 (file)
@@ -70,21 +70,39 @@ class UploadResults:
                  uri_extension_data,
                  uri_extension_hash,
                  verifycapstr):
-        self.file_size = file_size
-        self.ciphertext_fetched = ciphertext_fetched
-        self.preexisting_shares = preexisting_shares
-        self.pushed_shares = pushed_shares
-        self.sharemap = sharemap
-        self.servermap = servermap
-        self.timings = timings
-        self.uri_extension_data = uri_extension_data
-        self.uri_extension_hash = uri_extension_hash
-        self.verifycapstr = verifycapstr
+        self._file_size = file_size
+        self._ciphertext_fetched = ciphertext_fetched
+        self._preexisting_shares = preexisting_shares
+        self._pushed_shares = pushed_shares
+        self._sharemap = sharemap
+        self._servermap = servermap
+        self._timings = timings
+        self._uri_extension_data = uri_extension_data
+        self._uri_extension_hash = uri_extension_hash
+        self._verifycapstr = verifycapstr
         self.uri = None
 
     def set_uri(self, uri):
         self.uri = uri
 
+    def get_file_size(self):
+        return self._file_size
+    def get_ciphertext_fetched(self):
+        return self._ciphertext_fetched
+    def get_preexisting_shares(self):
+        return self._preexisting_shares
+    def get_pushed_shares(self):
+        return self._pushed_shares
+    def get_sharemap(self):
+        return self._sharemap
+    def get_servermap(self):
+        return self._servermap
+    def get_timings(self):
+        return self._timings
+    def get_uri_extension_data(self):
+        return self._uri_extension_data
+    def get_verifycapstr(self):
+        return self._verifycapstr
 
 # our current uri_extension is 846 bytes for small files, a few bytes
 # more for larger ones (since the filesize is encoded in decimal in a
@@ -1552,7 +1570,7 @@ class Uploader(service.MultiService, log.PrefixingLogMixin):
                     # Generate the uri from the verifycap plus the key.
                     d3 = uploadable.get_encryption_key()
                     def put_readcap_into_results(key):
-                        v = uri.from_string(uploadresults.verifycapstr)
+                        v = uri.from_string(uploadresults.get_verifycapstr())
                         r = uri.CHKFileURI(key, v.uri_extension_hash, v.needed_shares, v.total_shares, v.size)
                         uploadresults.set_uri(r.to_string())
                         return uploadresults
index 5f6f8388f77add84f586742df3b4842242f648ef..bac3df275235f649e568dcc15b2cf891c9ae8847 100644 (file)
@@ -1927,37 +1927,54 @@ class IMutableUploadable(Interface):
         """
 
 class IUploadResults(Interface):
-    """I am returned by upload() methods. I contain a number of public
-    attributes which can be read to determine the results of the upload. Some
-    of these are functional, some are timing information. All of these may be
-    None.
+    """I am returned by immutable upload() methods and contain the results of
+    the upload.
 
-     .file_size : the size of the file, in bytes
+    I contain one public attribute:
      .uri : the CHK read-cap for the file
-     .ciphertext_fetched : how many bytes were fetched by the helper
-     .sharemap: dict mapping share identifier to set of serverids
-                   (binary strings). This indicates which servers were given
-                   which shares. For immutable files, the shareid is an
-                   integer (the share number, from 0 to N-1). For mutable
-                   files, it is a string of the form 'seq%d-%s-sh%d',
-                   containing the sequence number, the roothash, and the
-                   share number.
-     .servermap : dict mapping server peerid to a set of share numbers
-     .timings : dict of timing information, mapping name to seconds (float)
-       total : total upload time, start to finish
-       storage_index : time to compute the storage index
-       peer_selection : time to decide which peers will be used
-       contacting_helper : initial helper query to upload/no-upload decision
-       helper_total : initial helper query to helper finished pushing
-       cumulative_fetch : helper waiting for ciphertext requests
-       total_fetch : helper start to last ciphertext response
-       cumulative_encoding : just time spent in zfec
-       cumulative_sending : just time spent waiting for storage servers
-       hashes_and_close : last segment push to shareholder close
-       total_encode_and_push : first encode to shareholder close
-
     """
 
+    # some methods return empty values (0 or an empty dict) when called for
+    # non-distributed LIT files
+    def get_file_size():
+        """Return the file size, in bytes."""
+    def get_ciphertext_fetched():
+        """Return the number of bytes fetched by the helper for this upload,
+        or 0 if the helper did not need to fetch any bytes (or if there was
+        no helper)."""
+    def get_preexisting_shares():
+        """Return the number of shares that were already present in the grid."""
+    def get_pushed_shares():
+        """Return the number of shares that were uploaded."""
+    def get_sharemap():
+        """Return a dict mapping share identifier to set of serverids (binary
+        strings). This indicates which servers were given which shares. For
+        immutable files, the shareid is an integer (the share number, from 0
+        to N-1). For mutable files, it is a string of the form
+        'seq%d-%s-sh%d', containing the sequence number, the roothash, and
+        the share number."""
+    def get_servermap():
+        """Return dict mapping server peerid to a set of share numbers."""
+    def get_timings():
+        """Return dict of timing information, mapping name to seconds. All
+        times are floats:
+          total : total upload time, start to finish
+          storage_index : time to compute the storage index
+          peer_selection : time to decide which peers will be used
+          contacting_helper : initial helper query to upload/no-upload decision
+          helper_total : initial helper query to helper finished pushing
+          cumulative_fetch : helper waiting for ciphertext requests
+          total_fetch : helper start to last ciphertext response
+          cumulative_encoding : just time spent in zfec
+          cumulative_sending : just time spent waiting for storage servers
+          hashes_and_close : last segment push to shareholder close
+          total_encode_and_push : first encode to shareholder close
+        """
+    def get_uri_extension_data():
+        """Return the dict of UEB data created for this file."""
+    def get_verifycapstr():
+        """Return the (string) verify-cap URI for the uploaded object."""
+
 class IDownloadResults(Interface):
     """I am created internally by download() methods. I contain a number of
     public attributes which contain details about the download process.::
index 23bf1aa28b64f6138dd59d86578ebd3d70bdf6f0..4a8e40617b8fbbeaa5325b6cb56284a4a204d38f 100644 (file)
@@ -326,7 +326,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 
                 # this is really bytes received rather than sent, but it's
                 # convenient and basically measures the same thing
-                bytes_sent = results.ciphertext_fetched
+                bytes_sent = results.get_ciphertext_fetched()
                 self.failUnless(isinstance(bytes_sent, (int, long)), bytes_sent)
 
                 # We currently don't support resumption of upload if the data is
index cf050b7850ed85f39c5517191a26ecd8c26405b1..99116de421aeebae53ef4533a91afa2dd83ce910 100644 (file)
@@ -1161,7 +1161,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         # Make sure that only as many shares as necessary to satisfy
         # servers of happiness were pushed.
         d.addCallback(lambda results:
-            self.failUnlessEqual(results.pushed_shares, 3))
+            self.failUnlessEqual(results.get_pushed_shares(), 3))
         d.addCallback(lambda ign:
             self.failUnless(self._has_happy_share_distribution()))
         return d
index a1df1029bf0d014ca253330221e712de1506d45f..a51e5bae976decd33fd0cc81a4b11044ee032ac3 100644 (file)
@@ -22,17 +22,17 @@ class UploadResultsRendererMixin(RateAndTimeMixin):
 
     def render_pushed_shares(self, ctx, data):
         d = self.upload_results()
-        d.addCallback(lambda res: res.pushed_shares)
+        d.addCallback(lambda res: res.get_pushed_shares())
         return d
 
     def render_preexisting_shares(self, ctx, data):
         d = self.upload_results()
-        d.addCallback(lambda res: res.preexisting_shares)
+        d.addCallback(lambda res: res.get_preexisting_shares())
         return d
 
     def render_sharemap(self, ctx, data):
         d = self.upload_results()
-        d.addCallback(lambda res: res.sharemap)
+        d.addCallback(lambda res: res.get_sharemap())
         def _render(sharemap):
             if sharemap is None:
                 return "None"
@@ -46,7 +46,7 @@ class UploadResultsRendererMixin(RateAndTimeMixin):
 
     def render_servermap(self, ctx, data):
         d = self.upload_results()
-        d.addCallback(lambda res: res.servermap)
+        d.addCallback(lambda res: res.get_servermap())
         def _render(servermap):
             if servermap is None:
                 return "None"
@@ -64,12 +64,12 @@ class UploadResultsRendererMixin(RateAndTimeMixin):
 
     def data_file_size(self, ctx, data):
         d = self.upload_results()
-        d.addCallback(lambda res: res.file_size)
+        d.addCallback(lambda res: res.get_file_size())
         return d
 
     def _get_time(self, name):
         d = self.upload_results()
-        d.addCallback(lambda res: res.timings.get(name))
+        d.addCallback(lambda res: res.get_timings().get(name))
         return d
 
     def data_time_total(self, ctx, data):
@@ -105,8 +105,8 @@ class UploadResultsRendererMixin(RateAndTimeMixin):
     def _get_rate(self, name):
         d = self.upload_results()
         def _convert(r):
-            file_size = r.file_size
-            time = r.timings.get(name)
+            file_size = r.get_file_size()
+            time = r.get_timings().get(name)
             return compute_rate(file_size, time)
         d.addCallback(_convert)
         return d
@@ -126,9 +126,9 @@ class UploadResultsRendererMixin(RateAndTimeMixin):
     def data_rate_encode_and_push(self, ctx, data):
         d = self.upload_results()
         def _convert(r):
-            file_size = r.file_size
-            time1 = r.timings.get("cumulative_encoding")
-            time2 = r.timings.get("cumulative_sending")
+            file_size = r.get_file_size()
+            time1 = r.get_timings().get("cumulative_encoding")
+            time2 = r.get_timings().get("cumulative_sending")
             if (time1 is None or time2 is None):
                 return None
             else:
@@ -139,8 +139,8 @@ class UploadResultsRendererMixin(RateAndTimeMixin):
     def data_rate_ciphertext_fetch(self, ctx, data):
         d = self.upload_results()
         def _convert(r):
-            fetch_size = r.ciphertext_fetched
-            time = r.timings.get("cumulative_fetch")
+            fetch_size = r.get_ciphertext_fetched()
+            time = r.get_timings().get("cumulative_fetch")
             return compute_rate(fetch_size, time)
         d.addCallback(_convert)
         return d