CheckResults: replace get_data() with as_dict(), use getters in web status

Brian Warner
2012-05-25 00:14:08 -07:00
parent 4867dca3f0
commit 8daacbcf69
5 changed files with 32 additions and 36 deletions
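
In caller terms, the commit replaces direct access to the CheckResults data dictionary with accessor methods, keeping as_dict() for the cases that still want the raw dictionary (mostly debug output). A minimal sketch of the caller-side change, assuming `cr` provides the CheckResults API shown in the diffs below:

    # Before: poke into the results dictionary returned by get_data()
    data = cr.get_data()
    needed = data["count-shares-needed"]
    expected = data["count-shares-expected"]

    # After: use the getters; as_dict() replaces get_data() where the
    # whole dictionary is still wanted (e.g. for failure messages)
    needed = cr.get_encoding_needed()
    expected = cr.get_encoding_expected()
    debug_dump = cr.as_dict()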

View File

@@ -109,7 +109,7 @@ class CheckResults:
     def get_sharemap(self):
         return self._data["sharemap"]
-    def get_data(self):
+    def as_dict(self):
         return self._data
     def get_summary(self):

View File

@@ -280,7 +280,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         needs_rebalancing = bool( num_servers < 10 )
         if not incomplete:
             self.failUnlessEqual(cr.needs_rebalancing(), needs_rebalancing,
-                                 str((where, cr, cr.get_data())))
+                                 str((where, cr, cr.as_dict())))
         self.failUnlessEqual(cr.get_share_counter_good(), 10, where)
         self.failUnlessEqual(cr.get_encoding_needed(), 3, where)
         self.failUnlessEqual(cr.get_encoding_expected(), 10, where)

View File

@@ -117,7 +117,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
                 judgement(vr)
             except unittest.FailTest, e:
                 # FailTest just uses e.args[0] == str
-                new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.get_data())
+                new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
                 e.args = (new_arg,)
                 raise
         d.addCallback(_check)
@@ -127,7 +127,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         """ Verify says the file is healthy when none of the shares have been
         touched in a way that matters. It doesn't use more than seven times
         as many reads as it needs."""
-        self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 10)
         self.failUnlessEqual(len(vr.get_sharemap()), 10)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -162,7 +162,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         giving you the share data. Test that verifier handles these answers
         correctly. It doesn't use more than seven times as many reads as it
         needs."""
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(len(vr.get_sharemap()), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -182,7 +182,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         # offsets) to something larger than 2 will trigger a
         # ShareVersionIncompatible exception, which should be counted in
         # list-incompatible-shares, rather than list-corrupt-shares.
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(len(vr.get_sharemap()), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -201,7 +201,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         # corruption of fields that the server does not check (which is most
         # of them), which will be detected by the client as it downloads
         # those shares.
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
         self.failUnlessEqual(vr.get_encoding_expected(), 10)
@@ -490,7 +490,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
         self.failIfBigger(delta_reads, MAX_DELTA_READS)
         self.failIfBigger(delta_allocates, (DELTA_WRITES_PER_SHARE * 7))
         self.failIf(pre.is_healthy())
-        self.failUnless(post.is_healthy(), post.get_data())
+        self.failUnless(post.is_healthy(), post.as_dict())
         # Make sure we really have 10 shares.
         shares = self.find_uri_shares(self.uri)

View File

@@ -885,9 +885,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         m = monitor.Monitor()
         d.addCallback(lambda fn: fn.check(m))
         def _check(cr):
-            data = cr.get_data()
-            self.failUnlessEqual(data["count-shares-needed"], 7)
-            self.failUnlessEqual(data["count-shares-expected"], 12)
+            self.failUnlessEqual(cr.get_encoding_needed(), 7)
+            self.failUnlessEqual(cr.get_encoding_expected(), 12)
         d.addCallback(_check)
         return d

View File

@@ -8,30 +8,27 @@ from allmydata.web.operations import ReloadMixin
 from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
 from allmydata.util import base32, idlib
 
-def json_check_counts(d):
-    r = {}
-    r["count-shares-good"] = d["count-shares-good"]
-    r["count-shares-needed"] = d["count-shares-needed"]
-    r["count-shares-expected"] = d["count-shares-expected"]
-    r["count-good-share-hosts"] = d["count-good-share-hosts"]
-    r["count-corrupt-shares"] = d["count-corrupt-shares"]
-    r["list-corrupt-shares"] = [ (idlib.nodeid_b2a(serverid),
-                                  base32.b2a(si), shnum)
-                                 for (serverid, si, shnum)
-                                 in d["list-corrupt-shares"] ]
-    r["servers-responding"] = [idlib.nodeid_b2a(serverid)
-                               for serverid in d["servers-responding"]]
-    sharemap = {}
-    for (shareid, serverids) in d["sharemap"].items():
-        sharemap[shareid] = [idlib.nodeid_b2a(serverid)
-                             for serverid in serverids]
-    r["sharemap"] = sharemap
-
-    r["count-wrong-shares"] = d["count-wrong-shares"]
-    r["count-recoverable-versions"] = d["count-recoverable-versions"]
-    r["count-unrecoverable-versions"] = d["count-unrecoverable-versions"]
-    return r
+def json_check_counts(r):
+    d = {"count-shares-good": r.get_share_counter_good(),
+         "count-shares-needed": r.get_encoding_needed(),
+         "count-shares-expected": r.get_encoding_expected(),
+         "count-good-share-hosts": r.get_host_counter_good_shares(),
+         "count-corrupt-shares": len(r.get_corrupt_shares()),
+         "list-corrupt-shares": [ (idlib.nodeid_b2a(serverid),
+                                   base32.b2a(si), shnum)
+                                  for (serverid, si, shnum)
+                                  in r.get_corrupt_shares() ],
+         "servers-responding": [idlib.nodeid_b2a(serverid)
+                                for serverid in r.get_servers_responding()],
+         "sharemap": dict([(shareid, [idlib.nodeid_b2a(serverid)
+                                      for serverid in serverids])
+                           for (shareid, serverids)
+                           in r.get_sharemap().items()]),
+         "count-wrong-shares": r.get_share_counter_wrong(),
+         "count-recoverable-versions": r.get_version_counter_recoverable(),
+         "count-unrecoverable-versions": r.get_version_counter_unrecoverable(),
+         }
+    return d
 
 def json_check_results(r):
     if r is None:
@@ -43,7 +40,7 @@ def json_check_results(r):
     data = {}
     data["storage-index"] = r.get_storage_index_string()
    data["summary"] = r.get_summary()
-    data["results"] = json_check_counts(r.get_data())
+    data["results"] = json_check_counts(r)
     data["results"]["needs-rebalancing"] = r.needs_rebalancing()
     data["results"]["healthy"] = r.is_healthy()
     data["results"]["recoverable"] = r.is_recoverable()