From: Brian Warner Date: Thu, 19 Feb 2009 21:16:54 +0000 (-0700) Subject: test_crawler: disable the percentage-of-cpu-used test, since it is too unreliable... X-Git-Tag: allmydata-tahoe-1.4.0~179 X-Git-Url: https://git.rkrishnan.org/README.win32?a=commitdiff_plain;h=ff9811211d45eb1384eca00d047daba33a1c9a41;p=tahoe-lafs%2Ftahoe-lafs.git test_crawler: disable the percentage-of-cpu-used test, since it is too unreliable on our slow buildslaves. But leave the code in place for developers to run by hand. --- diff --git a/src/allmydata/test/test_crawler.py b/src/allmydata/test/test_crawler.py index a0a21cbb..26e03689 100644 --- a/src/allmydata/test/test_crawler.py +++ b/src/allmydata/test/test_crawler.py @@ -256,7 +256,13 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin): d.addCallback(_check) return d - def test_cpu_usage(self): + def OFF_test_cpu_usage(self): + # this test can't actually assert anything, because too many + # buildslave machines are slow. But on a fast developer machine, it + # can produce interesting results. So if you care about how well the + # Crawler is accomplishing it's run-slowly goals, re-enable this test + # and read the stdout when it runs. + self.basedir = "crawler/Basic/cpu_usage" fileutil.make_dirs(self.basedir) serverid = "\x00" * 20 @@ -286,23 +292,17 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin): # seconds), the overhead is enough to make a nominal 50% usage more # like 30%. Forcing sleep_time to 0 only gets us 67% usage. - # the windows/cygwin buildslaves, which are slow (even by windows - # standards) and have low-resolution timers, get more like 7% usage. - # On windows I'll extend the allowable range. - - min_ok = 20 - min_cycles = 1 - if "cygwin" in sys.platform.lower() or "win32" in sys.platform.lower(): - min_ok = 3 - min_cycles = 0 - start = time.time() d = self.stall(delay=4.0) def _done(res): elapsed = time.time() - start percent = 100.0 * c.accumulated / elapsed - self.failUnless(min_ok < percent < 70, "crawler got %d%%" % percent) - self.failUnless(c.cycles >= min_cycles, c.cycles) + # our buildslaves vary too much in their speeds and load levels, + # and many of them only manage to hit 7% usage when our target is + # 50%. So don't assert anything about the results, just log them. + print + print "crawler: got %d%% percent when trying for 50%%" % percent + print "crawler: got %d full cycles" % c.cycles d.addCallback(_done) return d