crawler: fix problems on windows and our slow cygwin slave
author Brian Warner <warner@lothar.com>
Thu, 19 Feb 2009 05:24:31 +0000 (22:24 -0700)
committer Brian Warner <warner@lothar.com>
Thu, 19 Feb 2009 05:24:31 +0000 (22:24 -0700)
src/allmydata/storage/crawler.py
src/allmydata/test/test_crawler.py

diff --git a/src/allmydata/storage/crawler.py b/src/allmydata/storage/crawler.py
index 071f20608a5ba677436edffcf8be4d8a767c761c..e79ba94ddca1001e7ddc44951678509a2719cca6 100644
@@ -1,5 +1,5 @@
 
-import os, time, struct, pickle
+import sys, os, time, struct, pickle
 from twisted.internet import reactor
 from twisted.application import service
 from allmydata.storage.server import si_b2a
@@ -89,6 +89,9 @@ class ShareCrawler(service.MultiService):
         f = open(tmpfile, "wb")
         pickle.dump(state, f)
         f.close()
+        if "win32" in sys.platform.lower():
+            # sigh, stupid windows can't use this technique
+            os.unlink(self.statefile)
         os.rename(tmpfile, self.statefile)
 
     def startService(self):
@@ -181,3 +184,4 @@ class ShareCrawler(service.MultiService):
 
     def yielding(self, sleep_time):
         pass
+
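
Note on the win32 branch above: os.rename() on Windows refuses to overwrite an existing destination, which is why the old state file is unlinked first (giving up the atomic replace on that platform). As written, the unlink will also raise OSError if save_state() ever runs before self.statefile exists. A minimal sketch of the same write-temp-then-rename pattern with that guard added, using a hypothetical standalone helper rather than the ShareCrawler method:

import os, sys, pickle

def save_pickle(state, statefile):
    # Write to a temp file, then move it into place, so a crash mid-write
    # never leaves a truncated state file behind.
    tmpfile = statefile + ".tmp"
    f = open(tmpfile, "wb")
    pickle.dump(state, f)
    f.close()
    if "win32" in sys.platform.lower():
        # Windows cannot rename over an existing file, so remove the old
        # copy first; this opens a brief window with no state file at all.
        if os.path.exists(statefile):
            os.unlink(statefile)
    os.rename(tmpfile, statefile)

On Python 3.3 and later, os.replace() performs the overwriting rename in a single call on both POSIX and Windows.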
diff --git a/src/allmydata/test/test_crawler.py b/src/allmydata/test/test_crawler.py
index dfee79a3415ebd08016fd2d8ce86f9eb2697826b..b9e737b164e24b476b3b3d288f432dc5b75f4ed0 100644
@@ -1,5 +1,6 @@
 
 import time
+import sys
 import os.path
 from twisted.trial import unittest
 from twisted.application import service
@@ -285,15 +286,20 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
         # seconds), the overhead is enough to make a nominal 50% usage more
         # like 30%. Forcing sleep_time to 0 only gets us 67% usage.
 
-        # who knows what will happen on our slower buildslaves. I'll ditch
-        # the cycles>1 test first.
+        # the windows/cygwin buildslaves, which are slow (even by windows
+        # standards) and have low-resolution timers, get more like 7% usage.
+        # On windows I'll extend the allowable range.
+
+        min_ok = 20
+        if "cygwin" in sys.platform.lower() or "win32" in sys.platform.lower():
+            min_ok = 3
 
         start = time.time()
         d = self.stall(delay=4.0)
         def _done(res):
             elapsed = time.time() - start
             percent = 100.0 * c.accumulated / elapsed
-            self.failUnless(20 < percent < 70, "crawler got %d%%" % percent)
+            self.failUnless(min_ok < percent < 70, "crawler got %d%%" % percent)
             self.failUnless(c.cycles >= 1, c.cycles)
         d.addCallback(_done)
         return d
@@ -318,3 +324,4 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
             return c.first_cycle_finished
         d = self.poll(_check)
         return d
+
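
For reference, the assertion in the hunk above measures the crawler's duty cycle as 100.0 * c.accumulated / elapsed and only relaxes the lower bound on the slow, low-resolution-timer windows/cygwin buildslaves; the 70% ceiling is unchanged. A standalone sketch of that check, with hypothetical arguments standing in for the test's locals:

import sys

def assert_sane_crawler_usage(accumulated, elapsed):
    # 'accumulated' plays the role of c.accumulated (time the crawler spent
    # working); 'elapsed' is the wall-clock time measured around the stall.
    percent = 100.0 * accumulated / elapsed
    min_ok = 20
    if "cygwin" in sys.platform.lower() or "win32" in sys.platform.lower():
        # slow buildslaves with coarse timers report something like 7% usage
        min_ok = 3
    assert min_ok < percent < 70, "crawler got %d%%" % percent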