expirer: fix prediction math, thanks to Zandr for the catch
author    Brian Warner <warner@allmydata.com>
Mon, 9 Mar 2009 20:42:17 +0000 (13:42 -0700)
committer Brian Warner <warner@allmydata.com>
Mon, 9 Mar 2009 20:42:17 +0000 (13:42 -0700)
src/allmydata/storage/expirer.py
src/allmydata/test/test_storage.py

index 5769cb64c2d64fad82d19cd6b0cb6e0528cf1bbf..9a7d13c154f2326ef09ee0d4cb6b80ee43750723 100644
@@ -354,7 +354,8 @@ class LeaseCheckingCrawler(ShareCrawler):
         cycle = {"space-recovered": cycle_sr}
 
         if progress["cycle-complete-percentage"] > 0.0:
-            m = 100.0 / progress["cycle-complete-percentage"]
+            pc = progress["cycle-complete-percentage"] / 100.0
+            m = (1-pc)/pc
             for a in ("actual", "original-leasetimer", "configured-leasetimer"):
                 for b in ("numbuckets", "numshares", "sharebytes", "diskbytes"):
                     k = a+"-"+b
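
[Note: the hunk above swaps the remainder-prediction multiplier. A minimal
standalone sketch of the arithmetic, assuming the patch's own name pc for the
completed fraction; so_far is a hypothetical stand-in for any of the counters
the crawler accumulates while examining that fraction:]

    # Sketch of the prediction math fixed above (not the actual crawler code).
    # pc: fraction of the cycle completed so far (0 < pc <= 1)
    # so_far: a counter accumulated while examining that fraction

    def predict_remainder_old(so_far, pc):
        # buggy: 1/pc extrapolates the WHOLE cycle from the examined
        # fraction, so using it for the remainder double-counts so_far
        return so_far * (1.0 / pc)

    def predict_remainder_new(so_far, pc):
        # fixed: the unexamined (1-pc) fraction should yield counts in
        # proportion to what the examined pc fraction actually yielded
        return so_far * (1 - pc) / pc

    pc = 0.20     # one bucket seen, about 1/5th of the way through the ring
    so_far = 1
    print(predict_remainder_old(so_far, pc))  # 5.0 (over-predicts by so_far)
    print(predict_remainder_new(so_far, pc))  # 4.0; whole cycle = 1 + 4 = 5
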
index 66f2d4ae8782aef891da1f5071f388c6af8cf991..286efe1158179c0e264c451272a559ecc3ee224f 100644
@@ -1812,15 +1812,15 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
         def _check_html_in_cycle(html):
             s = remove_tags(html)
             # the first bucket encountered gets deleted, and its prefix
-            # happens to be about 1/6th of the way through the ring, so the
-            # predictor thinks we'll have 6 shares and that we'll delete them
+            # happens to be about 1/5th of the way through the ring, so the
+            # predictor thinks we'll have 5 shares and that we'll delete them
             # all. This part of the test depends upon the SIs landing right
             # where they do now.
             self.failUnlessIn("The remainder of this cycle is expected to "
-                              "recover: 5 shares, 5 buckets", s)
+                              "recover: 4 shares, 4 buckets", s)
             self.failUnlessIn("The whole cycle is expected to examine "
-                              "6 shares in 6 buckets and to recover: "
-                              "6 shares, 6 buckets", s)
+                              "5 shares in 5 buckets and to recover: "
+                              "5 shares, 5 buckets", s)
         d.addCallback(_check_html_in_cycle)
 
         # wait for the crawler to finish the first cycle. Two shares should
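
[Note: with the corrected multiplier, the new test expectations follow
directly: the first bucket lands about 1/5th of the way through the ring
(pc = 0.2) with one share in one bucket recovered so far, so the predicted
remainder is 1 * (1 - 0.2) / 0.2 = 4 shares and 4 buckets, and the
whole-cycle prediction is 1 + 4 = 5 shares in 5 buckets.]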