Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Synced 2025-01-19 03:06:33 +00:00

naming: finish renaming "CheckerResults" to "CheckResults"

Commit ef60e85ec6 (parent 167742c2b3)
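The hunks below are mechanical: every reference to the old CheckerResults class (and the web-layer LiteralCheckerResults page) becomes CheckResults / LiteralCheckResults, with the constructor arguments left untouched. As a rough illustration of the pattern, here is a minimal before/after sketch; the helper name make_results and its two parameters are placeholders invented for this example, not code from the commit:

    # Illustrative only; the real call sites appear in the hunks that follow.
    # Before this commit:
    #   from allmydata.check_results import CheckerResults
    #   cr = CheckerResults(verifycap, storage_index)
    # After this commit -- same arguments, new class name:
    from allmydata.check_results import CheckResults

    def make_results(verifycap, storage_index):
        # 'verifycap' and 'storage_index' stand in for the values the real
        # checker/repairer code passes (see the repairer hunk below).
        cr = CheckResults(verifycap, storage_index)
        cr.set_healthy(True)              # setters are unchanged by the rename
        cr.set_needs_rebalancing(False)
        return cr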
@@ -4,7 +4,7 @@ from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
      IDeepCheckResults, IDeepCheckAndRepairResults, IURI
 from allmydata.util import base32
 
-class CheckerResults:
+class CheckResults:
     implements(ICheckResults)
 
     def __init__(self, uri, storage_index):
@@ -1,6 +1,6 @@
 from foolscap import DeadReferenceError
 from allmydata import hashtree
-from allmydata.check_results import CheckerResults
+from allmydata.check_results import CheckResults
 from allmydata.immutable import download
 from allmydata.uri import CHKFileVerifierURI
 from allmydata.util.assertutil import precondition
@@ -202,7 +202,7 @@ class Checker(log.PrefixingLogMixin):
         return d
 
     def _format_results(self, results):
-        cr = CheckerResults(self._verifycap, self._verifycap.storage_index)
+        cr = CheckResults(self._verifycap, self._verifycap.storage_index)
         d = {}
         d['count-shares-needed'] = self._verifycap.needed_shares
         d['count-shares-expected'] = self._verifycap.total_shares
@@ -1,6 +1,6 @@
 from twisted.internet import defer
 from allmydata import storage
-from allmydata.check_results import CheckerResults, CheckAndRepairResults
+from allmydata.check_results import CheckResults, CheckAndRepairResults
 from allmydata.immutable import download
 from allmydata.util import nummedobj
 from allmydata.util.assertutil import precondition
@@ -81,10 +81,10 @@ class Repairer(LogMixin):
 
     def _repair_phase(self, unused=None):
         bogusresults = CheckAndRepairResults(self._storageindex) # XXX THIS REPAIRER NOT HERE YET
-        bogusresults.pre_repair_results = CheckerResults(self._verifycap, self._storageindex)
+        bogusresults.pre_repair_results = CheckResults(self._verifycap, self._storageindex)
         bogusresults.pre_repair_results.set_healthy(True)
         bogusresults.pre_repair_results.set_needs_rebalancing(False)
-        bogusresults.post_repair_results = CheckerResults(self._verifycap, self._storageindex)
+        bogusresults.post_repair_results = CheckResults(self._verifycap, self._storageindex)
         bogusresults.post_repair_results.set_healthy(True)
         bogusresults.post_repair_results.set_needs_rebalancing(False)
         bogusdata = {}
@@ -4,7 +4,7 @@ from twisted.python import failure
 from allmydata import hashtree
 from allmydata.uri import from_string
 from allmydata.util import hashutil, base32, idlib, log
-from allmydata.check_results import CheckAndRepairResults, CheckerResults
+from allmydata.check_results import CheckAndRepairResults, CheckResults
 
 from common import MODE_CHECK, CorruptShareError
 from servermap import ServerMap, ServermapUpdater
@@ -17,7 +17,7 @@ class MutableChecker:
         self._monitor = monitor
         self.bad_shares = [] # list of (nodeid,shnum,failure)
         self._storage_index = self._node.get_storage_index()
-        self.results = CheckerResults(from_string(node.get_uri()), self._storage_index)
+        self.results = CheckResults(from_string(node.get_uri()), self._storage_index)
         self.need_repair = False
         self.responded = set() # set of (binary) nodeids
 
@@ -298,7 +298,7 @@ class MutableCheckAndRepairer(MutableChecker):
         d = self._node.repair(self.results)
         def _repair_finished(repair_results):
             self.cr_results.repair_successful = True
-            r = CheckerResults(from_string(self._node.get_uri()), self._storage_index)
+            r = CheckResults(from_string(self._node.get_uri()), self._storage_index)
             self.cr_results.post_repair_results = r
             self._fill_checker_results(repair_results.servermap, r)
             self.cr_results.repair_results = repair_results # TODO?
@@ -11,7 +11,7 @@ from allmydata import uri, dirnode, client
 from allmydata.introducer.server import IntroducerNode
 from allmydata.interfaces import IURI, IMutableFileNode, IFileNode, \
      FileTooLargeError, NotEnoughSharesError, ICheckable
-from allmydata.check_results import CheckerResults, CheckAndRepairResults, \
+from allmydata.check_results import CheckResults, CheckAndRepairResults, \
      DeepCheckResults, DeepCheckAndRepairResults
 from allmydata.mutable.common import CorruptShareError
 from allmydata.storage import storage_index_to_dir
@@ -52,7 +52,7 @@ class FakeCHKFileNode:
         return self.storage_index
 
     def check(self, monitor, verify=False):
-        r = CheckerResults(self.my_uri, self.storage_index)
+        r = CheckResults(self.my_uri, self.storage_index)
         is_bad = self.bad_shares.get(self.storage_index, None)
         data = {}
         data["count-shares-needed"] = 3
@@ -186,7 +186,7 @@ class FakeMutableFileNode:
         return self.storage_index
 
     def check(self, monitor, verify=False):
-        r = CheckerResults(self.my_uri, self.storage_index)
+        r = CheckResults(self.my_uri, self.storage_index)
         is_bad = self.bad_shares.get(self.storage_index, None)
         data = {}
         data["count-shares-needed"] = 3
@@ -15,7 +15,7 @@ from allmydata.util import hashutil, base32
 from allmydata.monitor import Monitor
 from allmydata.test.common import make_chk_file_uri, make_mutable_file_uri, \
      FakeDirectoryNode, create_chk_filenode, ErrorMixin, SystemTestMixin
-from allmydata.check_results import CheckerResults, CheckAndRepairResults
+from allmydata.check_results import CheckResults, CheckAndRepairResults
 import common_util as testutil
 
 # to test dirnode.py, we want to construct a tree of real DirectoryNodes that
@@ -42,7 +42,7 @@ class Marker:
         return self.storage_index
 
     def check(self, monitor, verify=False):
-        r = CheckerResults(uri.from_string(self.nodeuri), None)
+        r = CheckResults(uri.from_string(self.nodeuri), None)
         r.set_healthy(True)
         r.set_recoverable(True)
         return defer.succeed(r)
@@ -156,7 +156,7 @@ class ResultsBase:
             target = target + "?output=%s" % output
         return T.a(href=target)[si_s]
 
-class LiteralCheckerResults(rend.Page, ResultsBase):
+class LiteralCheckResults(rend.Page, ResultsBase):
    docFactory = getxmlfile("literal-check-results.xhtml")
 
     def renderHTTP(self, ctx):
@@ -188,7 +188,7 @@ class CheckerBase:
             return T.div[T.a(href=return_to)["Return to parent directory"]]
         return ""
 
-class CheckerResults(CheckerBase, rend.Page, ResultsBase):
+class CheckResults(CheckerBase, rend.Page, ResultsBase):
     docFactory = getxmlfile("check-results.xhtml")
 
     def __init__(self, results):
@@ -289,7 +289,7 @@ class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin):
         si = base32.a2b(name)
         r = self.monitor.get_status()
         try:
-            return CheckerResults(r.get_results_for_storage_index(si))
+            return CheckResults(r.get_results_for_storage_index(si))
         except KeyError:
             raise WebError("No detailed results for SI %s" % html.escape(name),
                            http.NOT_FOUND)
@@ -23,7 +23,7 @@ from allmydata.web.common import text_plain, WebError, \
      getxmlfile, RenderMixin
 from allmydata.web.filenode import ReplaceMeMixin, \
      FileNodeHandler, PlaceHolderNodeHandler
-from allmydata.web.check_results import CheckerResults, \
+from allmydata.web.check_results import CheckResults, \
      CheckAndRepairResults, DeepCheckResults, DeepCheckAndRepairResults
 from allmydata.web.info import MoreInfo
 from allmydata.web.operations import ReloadMixin
@@ -351,7 +351,7 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
             d.addCallback(lambda res: CheckAndRepairResults(res))
         else:
             d = self.node.check(Monitor(), verify)
-            d.addCallback(lambda res: CheckerResults(res))
+            d.addCallback(lambda res: CheckResults(res))
         return d
 
     def _start_operation(self, monitor, renderer, ctx):
@@ -14,8 +14,8 @@ from allmydata.util import log, base32
 
 from allmydata.web.common import text_plain, WebError, IClient, RenderMixin, \
      boolean_of_arg, get_arg, should_create_intermediate_directories
-from allmydata.web.check_results import CheckerResults, \
-     CheckAndRepairResults, LiteralCheckerResults
+from allmydata.web.check_results import CheckResults, \
+     CheckAndRepairResults, LiteralCheckResults
 from allmydata.web.info import MoreInfo
 
 class ReplaceMeMixin:
@@ -257,13 +257,13 @@ class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
         verify = boolean_of_arg(get_arg(req, "verify", "false"))
         repair = boolean_of_arg(get_arg(req, "repair", "false"))
         if isinstance(self.node, LiteralFileNode):
-            return defer.succeed(LiteralCheckerResults())
+            return defer.succeed(LiteralCheckResults())
         if repair:
             d = self.node.check_and_repair(Monitor(), verify)
             d.addCallback(lambda res: CheckAndRepairResults(res))
         else:
             d = self.node.check(Monitor(), verify)
-            d.addCallback(lambda res: CheckerResults(res))
+            d.addCallback(lambda res: CheckResults(res))
         return d
 
     def render_DELETE(self, ctx):