From: Daira Hopwood
Date: Mon, 27 May 2013 20:40:19 +0000 (+0100)
Subject: Accounting crawler: make share deletion conditional (defaulting to False for now...
X-Git-Url: https://git.rkrishnan.org/%5B/%5D%20/uri/reliability?a=commitdiff_plain;h=c23e01c2c07526fa3f0b1a7572581c63758701d4;p=tahoe-lafs%2Ftahoe-lafs.git

Accounting crawler: make share deletion conditional (defaulting to False for now). refs #1987, #1921

Signed-off-by: Daira Hopwood
---

diff --git a/src/allmydata/storage/accounting_crawler.py b/src/allmydata/storage/accounting_crawler.py
index 5e8a5d40..9a280df2 100644
--- a/src/allmydata/storage/accounting_crawler.py
+++ b/src/allmydata/storage/accounting_crawler.py
@@ -34,6 +34,7 @@ class AccountingCrawler(ShareCrawler):
     def __init__(self, backend, statefile, leasedb, clock=None):
         ShareCrawler.__init__(self, backend, statefile, clock=clock)
         self._leasedb = leasedb
+        self._enable_share_deletion = False
 
     def process_prefix(self, cycle, prefix, start_slice):
         # Assume that we can list every prefixdir in this prefix quickly.
@@ -113,7 +114,8 @@ class AccountingCrawler(ShareCrawler):
                 (si_s, shnum) = shareid
                 log.msg(format="share SI=%(si_s)s shnum=%(shnum)s unexpectedly disappeared",
                         si_s=si_s, shnum=shnum, level=log.WEIRD)
-                self._leasedb.remove_deleted_share(si_a2b(si_s), shnum)
+                if self._enable_share_deletion:
+                    self._leasedb.remove_deleted_share(si_a2b(si_s), shnum)
 
             recovered_sharesets = [set() for st in xrange(len(SHARETYPES))]
 
@@ -148,9 +150,11 @@ class AccountingCrawler(ShareCrawler):
                 d3.addCallbacks(_deleted, _not_deleted)
                 return d3
 
-            # This only includes stable unleased shares (see ticket #1921).
-            unleased_sharemap = self._leasedb.get_unleased_shares_for_prefix(prefix)
-            d2 = for_items(_delete_share, unleased_sharemap)
+            d2 = defer.succeed(None)
+            if self._enable_share_deletion:
+                # This only includes stable unleased shares (see ticket #1921).
+                unleased_sharemap = self._leasedb.get_unleased_shares_for_prefix(prefix)
+                d2.addCallback(lambda ign: for_items(_delete_share, unleased_sharemap))
 
             def _inc_recovered_sharesets(ign):
                 self.increment(rec, "actual-buckets", sum([len(s) for s in recovered_sharesets]))
diff --git a/src/allmydata/test/test_storage.py b/src/allmydata/test/test_storage.py
index a0c42612..f75fe3a8 100644
--- a/src/allmydata/test/test_storage.py
+++ b/src/allmydata/test/test_storage.py
@@ -5061,8 +5061,13 @@ class AccountingCrawlerTest(CrawlerTestMixin, WebRenderingMixin, ReallyEqualMixi
             self.failUnlessEqual((len(aa.get_leases(si)), len(sa.get_leases(si))),
                                  expected)
 
+    def _skip_if_share_deletion_is_disabled(self, server):
+        if not server.get_accounting_crawler()._enable_share_deletion:
+            raise unittest.SkipTest("share deletion by the accounting crawler is disabled")
+
     def test_expire_age(self):
         server = self.create("test_expire_age", detached=True)
+        self._skip_if_share_deletion_is_disabled(server)
 
         # setting expiration_time to 2000 means that any lease which is more
         # than 2000s old will be expired.
@@ -5182,6 +5187,7 @@ class AccountingCrawlerTest(CrawlerTestMixin, WebRenderingMixin, ReallyEqualMixi
 
     def test_expire_cutoff_date(self):
         server = self.create("test_expire_cutoff_date", detached=True)
+        self._skip_if_share_deletion_is_disabled(server)
 
         # setting cutoff-date to 2000 seconds ago means that any lease which
         # is more than 2000s old will be expired.
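
With this change, deletion of unleased shares is gated on the crawler's private
_enable_share_deletion attribute, which is hard-coded to False and has no
configuration hook yet. A minimal sketch of how a test could opt back in, under
the assumption that it has direct access to the crawler instance (only
get_accounting_crawler() and the attribute name come from this patch; the
surrounding setup mirrors the tests above and is purely illustrative):

    # Illustrative only, not part of this patch: flip the private flag before
    # the crawler runs so that unleased shares are actually deleted.
    server = self.create("test_expire_age", detached=True)
    crawler = server.get_accounting_crawler()
    crawler._enable_share_deletion = True  # hard-coded to False by this patch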