test_crawler: disable the percentage-of-cpu-used test, since it is too unreliable on our slow buildslaves. But leave the code in place for developers to run by hand.

Brian Warner 2009-02-19 14:16:54 -07:00
parent 790a10d1b2
commit ff9811211d

@@ -256,7 +256,13 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
         d.addCallback(_check)
         return d
-    def test_cpu_usage(self):
+    def OFF_test_cpu_usage(self):
+        # this test can't actually assert anything, because too many
+        # buildslave machines are slow. But on a fast developer machine, it
+        # can produce interesting results. So if you care about how well the
+        # Crawler is accomplishing it's run-slowly goals, re-enable this test
+        # and read the stdout when it runs.
         self.basedir = "crawler/Basic/cpu_usage"
         fileutil.make_dirs(self.basedir)
         serverid = "\x00" * 20
@@ -286,23 +292,17 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
         # seconds), the overhead is enough to make a nominal 50% usage more
         # like 30%. Forcing sleep_time to 0 only gets us 67% usage.
-        # the windows/cygwin buildslaves, which are slow (even by windows
-        # standards) and have low-resolution timers, get more like 7% usage.
-        # On windows I'll extend the allowable range.
-        min_ok = 20
-        min_cycles = 1
-        if "cygwin" in sys.platform.lower() or "win32" in sys.platform.lower():
-            min_ok = 3
-            min_cycles = 0
         start = time.time()
         d = self.stall(delay=4.0)
         def _done(res):
             elapsed = time.time() - start
             percent = 100.0 * c.accumulated / elapsed
-            self.failUnless(min_ok < percent < 70, "crawler got %d%%" % percent)
-            self.failUnless(c.cycles >= min_cycles, c.cycles)
+            # our buildslaves vary too much in their speeds and load levels,
+            # and many of them only manage to hit 7% usage when our target is
+            # 50%. So don't assert anything about the results, just log them.
+            print
+            print "crawler: got %d%% percent when trying for 50%%" % percent
+            print "crawler: got %d full cycles" % c.cycles
         d.addCallback(_done)
         return d
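
Because the renamed method no longer starts with "test", trial's collector will skip it, but a developer can still run it by hand as the new comment suggests. One way to do that is sketched below: alias the method back under a test_-prefixed name in a throwaway subclass. The file name, subclass name, and module path here are assumptions for illustration, not part of this commit.

    # scratch_cpu_usage.py -- hypothetical helper, not included in this commit.
    # Assumes the Basic test case shown above is importable as
    # allmydata.test.test_crawler.Basic.
    from allmydata.test.test_crawler import Basic

    class ManualCpuUsage(Basic):
        # restore a test_ prefix so trial's collector picks the method up again
        test_cpu_usage = Basic.OFF_test_cpu_usage

Running something like "trial scratch_cpu_usage.ManualCpuUsage.test_cpu_usage" should then exercise just that one method and print the "crawler: got ..." lines to stdout.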