From: Daira Hopwood
Date: Mon, 27 May 2013 20:40:19 +0000 (+0100)
Subject: Accounting crawler: make share deletion conditional (defaulting to False for now...
X-Git-Url: https://git.rkrishnan.org/pf/content/%22news.html/-?a=commitdiff_plain;h=b65a93fcd1b716ecc42d1db51052fa26505b94c6;p=tahoe-lafs%2Ftahoe-lafs.git

Accounting crawler: make share deletion conditional (defaulting to False for now).

refs #1987, #1921

Signed-off-by: Daira Hopwood
---

diff --git a/src/allmydata/storage/accounting_crawler.py b/src/allmydata/storage/accounting_crawler.py
index 5e8a5d40..9a280df2 100644
--- a/src/allmydata/storage/accounting_crawler.py
+++ b/src/allmydata/storage/accounting_crawler.py
@@ -34,6 +34,7 @@ class AccountingCrawler(ShareCrawler):
     def __init__(self, backend, statefile, leasedb, clock=None):
         ShareCrawler.__init__(self, backend, statefile, clock=clock)
         self._leasedb = leasedb
+        self._enable_share_deletion = False
 
     def process_prefix(self, cycle, prefix, start_slice):
         # Assume that we can list every prefixdir in this prefix quickly.
@@ -113,7 +114,8 @@ class AccountingCrawler(ShareCrawler):
             (si_s, shnum) = shareid
             log.msg(format="share SI=%(si_s)s shnum=%(shnum)s unexpectedly disappeared",
                     si_s=si_s, shnum=shnum, level=log.WEIRD)
-            self._leasedb.remove_deleted_share(si_a2b(si_s), shnum)
+            if self._enable_share_deletion:
+                self._leasedb.remove_deleted_share(si_a2b(si_s), shnum)
 
         recovered_sharesets = [set() for st in xrange(len(SHARETYPES))]
 
@@ -148,9 +150,11 @@ class AccountingCrawler(ShareCrawler):
             d3.addCallbacks(_deleted, _not_deleted)
             return d3
 
-        # This only includes stable unleased shares (see ticket #1921).
-        unleased_sharemap = self._leasedb.get_unleased_shares_for_prefix(prefix)
-        d2 = for_items(_delete_share, unleased_sharemap)
+        d2 = defer.succeed(None)
+        if self._enable_share_deletion:
+            # This only includes stable unleased shares (see ticket #1921).
+            unleased_sharemap = self._leasedb.get_unleased_shares_for_prefix(prefix)
+            d2.addCallback(lambda ign: for_items(_delete_share, unleased_sharemap))
 
         def _inc_recovered_sharesets(ign):
             self.increment(rec, "actual-buckets", sum([len(s) for s in recovered_sharesets]))
diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index 65c876fa..38e91f50 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -5067,8 +5067,13 @@ class AccountingCrawlerTest(CrawlerTestMixin, WebRenderingMixin, ReallyEqualMixi
 
         self.failUnlessEqual((len(aa.get_leases(si)), len(sa.get_leases(si))), expected)
 
+    def _skip_if_share_deletion_is_disabled(self, server):
+        if not server.get_accounting_crawler()._enable_share_deletion:
+            raise unittest.SkipTest("share deletion by the accounting crawler is disabled")
+
     def test_expire_age(self):
         server = self.create("test_expire_age", detached=True)
+        self._skip_if_share_deletion_is_disabled(server)
 
         # setting expiration_time to 2000 means that any lease which is more
         # than 2000s old will be expired.
@@ -5188,6 +5193,7 @@ class AccountingCrawlerTest(CrawlerTestMixin, WebRenderingMixin, ReallyEqualMixi
 
     def test_expire_cutoff_date(self):
         server = self.create("test_expire_cutoff_date", detached=True)
+        self._skip_if_share_deletion_is_disabled(server)
 
         # setting cutoff-date to 2000 seconds ago means that any lease which
        # is more than 2000s old will be expired.
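
A minimal standalone sketch of the gating pattern this patch introduces, for illustration only: deletion-related calls run solely when the private _enable_share_deletion flag is set, and it defaults to False. FakeLeaseDB, GatedCrawler, and handle_disappeared_share are hypothetical stand-ins, not Tahoe-LAFS classes or APIs.

    # Illustrative sketch (not part of the patch); stand-in names, no Tahoe-LAFS imports.

    class FakeLeaseDB(object):
        """Records removal calls instead of touching a real lease database."""
        def __init__(self):
            self.removed = []

        def remove_deleted_share(self, storage_index, shnum):
            self.removed.append((storage_index, shnum))

    class GatedCrawler(object):
        """Mimics the conditional-deletion behaviour added to AccountingCrawler."""
        def __init__(self, leasedb):
            self._leasedb = leasedb
            self._enable_share_deletion = False   # same default as the patch

        def handle_disappeared_share(self, storage_index, shnum):
            # Skip the destructive step unless deletion is explicitly enabled.
            if self._enable_share_deletion:
                self._leasedb.remove_deleted_share(storage_index, shnum)

    if __name__ == "__main__":
        db = FakeLeaseDB()
        crawler = GatedCrawler(db)
        crawler.handle_disappeared_share("si1", 0)
        assert db.removed == []                   # disabled by default: nothing removed
        crawler._enable_share_deletion = True
        crawler.handle_disappeared_share("si1", 0)
        assert db.removed == [("si1", 0)]         # enabled: removal proceeds

This mirrors why the new tests call _skip_if_share_deletion_is_disabled(): with the flag left at its False default, the expiration tests would otherwise fail, so they are skipped instead.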