From: Brian Warner
Date: Mon, 9 Mar 2009 20:42:17 +0000 (-0700)
Subject: expirer: fix prediction math, thanks to Zandr for the catch
X-Git-Tag: allmydata-tahoe-1.4.0~69
X-Git-Url: https://git.rkrishnan.org/pf/content/en/footer/statistics?a=commitdiff_plain;h=1ccd426a34f3f9f06ab21e6849136566e037a487;p=tahoe-lafs%2Ftahoe-lafs.git

expirer: fix prediction math, thanks to Zandr for the catch
---

diff --git a/src/allmydata/storage/expirer.py b/src/allmydata/storage/expirer.py
index 5769cb64..9a7d13c1 100644
--- a/src/allmydata/storage/expirer.py
+++ b/src/allmydata/storage/expirer.py
@@ -354,7 +354,8 @@ class LeaseCheckingCrawler(ShareCrawler):
         cycle = {"space-recovered": cycle_sr}
 
         if progress["cycle-complete-percentage"] > 0.0:
-            m = 100.0 / progress["cycle-complete-percentage"]
+            pc = progress["cycle-complete-percentage"] / 100.0
+            m = (1-pc)/pc
             for a in ("actual", "original-leasetimer", "configured-leasetimer"):
                 for b in ("numbuckets", "numshares", "sharebytes", "diskbytes"):
                     k = a+"-"+b
diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index 66f2d4ae..286efe11 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -1812,15 +1812,15 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
         def _check_html_in_cycle(html):
             s = remove_tags(html)
             # the first bucket encountered gets deleted, and its prefix
-            # happens to be about 1/6th of the way through the ring, so the
-            # predictor thinks we'll have 6 shares and that we'll delete them
+            # happens to be about 1/5th of the way through the ring, so the
+            # predictor thinks we'll have 5 shares and that we'll delete them
             # all. This part of the test depends upon the SIs landing right
             # where they do now.
             self.failUnlessIn("The remainder of this cycle is expected to "
-                              "recover: 5 shares, 5 buckets", s)
+                              "recover: 4 shares, 4 buckets", s)
             self.failUnlessIn("The whole cycle is expected to examine "
-                              "6 shares in 6 buckets and to recover: "
-                              "6 shares, 6 buckets", s)
+                              "5 shares in 5 buckets and to recover: "
+                              "5 shares, 5 buckets", s)
         d.addCallback(_check_html_in_cycle)
 
         # wait for the crawler to finish the first cycle. Two shares should