overhaul checker invocation

Removed the Checker service and removed checker-results storage (both the
in-memory form and the tiny stub of sqlite-based storage). Added ICheckable;
all checking and verifying is now done by calling the check() method on
filenodes and dirnodes (immutable files, literal files, mutable files, and
directory instances).

Checker results are returned in a Results instance, with an html() method for
display. Checker results have been temporarily removed from the wui directory
listing until we make some other fixes.

Also fixed client.create_node_from_uri() to create LiteralFileNodes properly,
since they have different checking behavior. Previously we were creating full
FileNodes with LIT uris inside, which were downloadable but not checkable.
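
As a rough sketch of the new calling convention (a hypothetical helper; it
assumes only the ICheckable and ICheckerResults methods added in this commit),
a caller that used to go through client.getServiceNamed("checker") now calls
check() directly on the node:

    def report_health(node):
        # node provides ICheckable: FileNode, LiteralFileNode,
        # MutableFileNode, or NewDirectoryNode
        d = node.check()               # lightweight: ask servers which shares they hold
        def _show(r):
            # r provides ICheckerResults
            print r.html_summary()     # e.g. "<span>healthy</span>"
            return r.is_healthy()
        d.addCallback(_show)
        return d
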
Brian Warner 2008-07-15 17:23:25 -07:00
parent 340b7add4f
commit 94e619c1f6
14 changed files with 236 additions and 289 deletions

@@ -6,13 +6,42 @@ This does no verification of the shares whatsoever. If the peer claims to
have the share, we believe them.
"""
import time, os.path
from zope.interface import implements
from twisted.internet import defer
from twisted.application import service
from twisted.python import log
from allmydata.interfaces import IVerifierURI
from allmydata import uri, download, storage
from allmydata.util import hashutil
from allmydata.interfaces import IVerifierURI, ICheckerResults
from allmydata import download, storage
from allmydata.util import hashutil, base32
class Results:
implements(ICheckerResults)
def __init__(self, storage_index):
# storage_index might be None for, say, LIT files
self.storage_index = storage_index
if storage_index is None:
self.storage_index_s = "<none>"
else:
self.storage_index_s = base32.b2a(storage_index)[:6]
def is_healthy(self):
return self.healthy
def html_summary(self):
if self.healthy:
return "<span>healthy</span>"
return "<span>NOT HEALTHY</span>"
def html(self):
s = "<div>\n"
s += "<h1>Checker Results for Immutable SI=%s</h1>\n" % self.storage_index_s
if self.healthy:
s += "<h2>Healthy!</h2>\n"
else:
s += "<h2>Not Healthy!</h2>\n"
s += "</div>\n"
return s
class SimpleCHKFileChecker:
"""Return a list of (needed, total, found, sharemap), where sharemap maps
@@ -21,7 +50,7 @@ class SimpleCHKFileChecker:
def __init__(self, peer_getter, uri_to_check):
self.peer_getter = peer_getter
self.found_shares = set()
self.uri_to_check = uri_to_check
self.uri_to_check = IVerifierURI(uri_to_check)
self.sharemap = {}
'''
@@ -65,17 +94,22 @@ class SimpleCHKFileChecker:
def _done(self, res):
u = self.uri_to_check
return (u.needed_shares, u.total_shares, len(self.found_shares),
self.sharemap)
r = Results(self.uri_to_check.storage_index)
r.healthy = bool(len(self.found_shares) >= u.needed_shares)
r.stuff = (u.needed_shares, u.total_shares, len(self.found_shares),
self.sharemap)
return r
class VerifyingOutput:
def __init__(self, total_length):
def __init__(self, total_length, results):
self._crypttext_hasher = hashutil.crypttext_hasher()
self.length = 0
self.total_length = total_length
self._segment_number = 0
self._crypttext_hash_tree = None
self._opened = False
self._results = results
results.healthy = False
def setup_hashtrees(self, plaintext_hashtree, crypttext_hashtree):
self._crypttext_hash_tree = crypttext_hashtree
@@ -96,7 +130,8 @@ class VerifyingOutput:
self.crypttext_hash = self._crypttext_hasher.digest()
def finish(self):
return True
self._results.healthy = True
return self._results
class SimpleCHKFileVerifier(download.FileDownloader):
@@ -118,7 +153,8 @@ class SimpleCHKFileVerifier(download.FileDownloader):
self._si_s = storage.si_b2a(self._storage_index)
self.init_logging()
self._output = VerifyingOutput(self._size)
r = Results(self._storage_index)
self._output = VerifyingOutput(self._size, r)
self._paused = False
self._stopped = False
@@ -166,74 +202,3 @@ class SimpleCHKFileVerifier(download.FileDownloader):
d.addCallback(self._done)
return d
class SQLiteCheckerResults:
def __init__(self, results_file):
pass
def add_results(self, uri_to_check, when, results):
pass
def get_results_for(self, uri_to_check):
return []
class InMemoryCheckerResults:
def __init__(self):
self.results = {} # indexed by uri
def add_results(self, uri_to_check, when, results):
if uri_to_check not in self.results:
self.results[uri_to_check] = []
self.results[uri_to_check].append( (when, results) )
def get_results_for(self, uri_to_check):
return self.results.get(uri_to_check, [])
class Checker(service.MultiService):
"""I am a service that helps perform file checks.
"""
name = "checker"
def __init__(self):
service.MultiService.__init__(self)
self.results = None
def startService(self):
service.MultiService.startService(self)
if self.parent:
results_file = os.path.join(self.parent.basedir,
"checker_results.db")
if os.path.exists(results_file):
self.results = SQLiteCheckerResults(results_file)
else:
self.results = InMemoryCheckerResults()
def check(self, uri_to_check):
if uri_to_check is None:
return defer.succeed(True)
uri_to_check = IVerifierURI(uri_to_check)
if isinstance(uri_to_check, uri.CHKFileVerifierURI):
peer_getter = self.parent.get_permuted_peers
c = SimpleCHKFileChecker(peer_getter, uri_to_check)
d = c.check()
else:
return defer.succeed(True) # TODO I don't know how to check, but I'm pretending to succeed.
def _done(res):
# TODO: handle exceptions too, record something useful about them
if self.results:
self.results.add_results(uri_to_check, time.time(), res)
return res
d.addCallback(_done)
return d
def verify(self, uri_to_verify):
if uri_to_verify is None:
return defer.succeed(True)
uri_to_verify = IVerifierURI(uri_to_verify)
if isinstance(uri_to_verify, uri.CHKFileVerifierURI):
v = SimpleCHKFileVerifier(self.parent, uri_to_verify)
return v.start()
else:
return defer.succeed(True) # TODO I don't know how to verify, but I'm pretending to succeed.
def checker_results_for(self, uri_to_check):
if uri_to_check and self.results:
return self.results.get_results_for(IVerifierURI(uri_to_check))
return []
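
For orientation, this is roughly how the two immutable-file code paths are now
driven directly, mirroring the new FileNode.check() further down (a sketch
only; it assumes a client object exposing get_permuted_peers, as the removed
service used via its parent):

    def check_immutable(client, verifier_cap, verify=False):
        # verifier_cap is an IVerifierURI for a CHK file
        if verify:
            # heavyweight: fetch every share and validate all hashes
            v = SimpleCHKFileVerifier(client, verifier_cap)
            return v.start()
        # lightweight: ask each server which shares it holds, trust the answers
        c = SimpleCHKFileChecker(client.get_permuted_peers, verifier_cap)
        return c.check()   # Deferred that fires with a Results instance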

@@ -14,12 +14,12 @@ import allmydata
from allmydata.storage import StorageServer
from allmydata.upload import Uploader
from allmydata.download import Downloader
from allmydata.checker import Checker
from allmydata.offloaded import Helper
from allmydata.control import ControlServer
from allmydata.introducer.client import IntroducerClient
from allmydata.util import hashutil, base32, testutil
from allmydata.filenode import FileNode
from allmydata.filenode import FileNode, LiteralFileNode
from allmydata.uri import LiteralFileURI
from allmydata.dirnode import NewDirectoryNode
from allmydata.mutable.node import MutableFileNode, MutableWatcher
from allmydata.stats import StatsProvider
@@ -173,7 +173,6 @@ class Client(node.Node, testutil.PollMixin):
self._node_cache = weakref.WeakValueDictionary() # uri -> node
self.add_service(Uploader(helper_furl, self.stats_provider))
self.add_service(Downloader(self.stats_provider))
self.add_service(Checker())
self.add_service(MutableWatcher(self.stats_provider))
def _publish(res):
# we publish an empty object so that the introducer can count how
@@ -302,8 +301,10 @@
# new-style dirnodes
node = NewDirectoryNode(self).init_from_uri(u)
elif IFileURI.providedBy(u):
# CHK
node = FileNode(u, self)
if isinstance(u, LiteralFileURI):
node = LiteralFileNode(u, self) # LIT
else:
node = FileNode(u, self) # CHK
else:
assert IMutableFileURI.providedBy(u), u
node = MutableFileNode(self).init_from_uri(u)

@@ -8,7 +8,7 @@ from allmydata.mutable.common import NotMutableError
from allmydata.mutable.node import MutableFileNode
from allmydata.interfaces import IMutableFileNode, IDirectoryNode,\
IURI, IFileNode, IMutableFileURI, IVerifierURI, IFilesystemNode, \
ExistingChildError
ExistingChildError, ICheckable
from allmydata.util import hashutil, mathutil
from allmydata.util.hashutil import netstring
from allmydata.util.limiter import ConcurrencyLimiter
@@ -112,7 +112,7 @@ class Adder:
return new_contents
class NewDirectoryNode:
implements(IDirectoryNode)
implements(IDirectoryNode, ICheckable)
filenode_class = MutableFileNode
def __init__(self, client):
@@ -242,9 +242,9 @@ class NewDirectoryNode:
def get_verifier(self):
return self._uri.get_verifier().to_string()
def check(self):
def check(self, verify=False, repair=False):
"""Perform a file check. See IChecker.check for details."""
return defer.succeed(None) # TODO
return self._node.check(verify, repair)
def list(self):
"""I return a Deferred that fires with a dictionary mapping child

@@ -1063,13 +1063,15 @@ class Downloader(service.MultiService):
assert t.write
assert t.close
if self.stats_provider:
self.stats_provider.count('downloader.files_downloaded', 1)
self.stats_provider.count('downloader.bytes_downloaded', u.get_size())
if isinstance(u, uri.LiteralFileURI):
dl = LiteralDownloader(self.parent, u, t)
elif isinstance(u, uri.CHKFileURI):
if self.stats_provider:
# these counters are meant for network traffic, and don't
# include LIT files
self.stats_provider.count('downloader.files_downloaded', 1)
self.stats_provider.count('downloader.bytes_downloaded', u.get_size())
dl = FileDownloader(self.parent, u, t)
else:
raise RuntimeError("I don't know how to download a %s" % u)

@@ -1,11 +1,13 @@
from zope.interface import implements
from twisted.internet import defer
from allmydata.interfaces import IFileNode, IFileURI, IURI
from allmydata.interfaces import IFileNode, IFileURI, IURI, ICheckable
from allmydata import uri
from allmydata.checker import SimpleCHKFileChecker, SimpleCHKFileVerifier, \
Results
class FileNode:
implements(IFileNode)
implements(IFileNode, ICheckable)
def __init__(self, uri, client):
u = IFileURI(uri)
@@ -39,9 +41,16 @@ class FileNode:
def get_verifier(self):
return IFileURI(self.uri).get_verifier()
def check(self):
verifier = self.get_verifier()
return self._client.getServiceNamed("checker").check(verifier)
def check(self, verify=False, repair=False):
assert repair is False # not implemented yet
vcap = self.get_verifier()
if verify:
v = SimpleCHKFileVerifier(self._client, vcap)
return v.start()
else:
peer_getter = self._client.get_permuted_peers
v = SimpleCHKFileChecker(peer_getter, vcap)
return v.check()
def download(self, target):
downloader = self._client.getServiceNamed("downloader")
@@ -54,7 +63,7 @@
class LiteralFileNode:
implements(IFileNode)
implements(IFileNode, ICheckable)
def __init__(self, my_uri, client):
u = IFileURI(my_uri)
@@ -89,10 +98,15 @@ class LiteralFileNode:
def get_verifier(self):
return None
def check(self):
return None
def check(self, verify=False, repair=False):
# neither verify= nor repair= affect LIT files
r = Results(None)
r.healthy = True
r.problems = []
return defer.succeed(r)
def download(self, target):
# note that this does not update the stats_provider
data = IURI(self.uri).data
target.open(len(data))
target.write(data)
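
As a usage sketch (a hypothetical helper; it assumes a client and a file
read-cap string), both flavours of file node now answer check() with an
ICheckerResults provider, so callers no longer need to special-case LIT files:

    def check_filecap(client, cap):
        node = client.create_node_from_uri(cap)   # FileNode or LiteralFileNode
        d = node.check()
        d.addCallback(lambda r: r.is_healthy())   # LIT files always report healthy
        return d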

@@ -1432,66 +1432,82 @@ class IUploader(Interface):
def upload_ssk(write_capability, new_version, uploadable):
"""TODO: how should this work?"""
class IChecker(Interface):
def check(uri_to_check, repair=False):
"""Accepts an IVerifierURI, and checks upon the health of its target.
class ICheckable(Interface):
def check(verify=False, repair=False):
"""Check upon my health, optionally repairing any problems.
For now, uri_to_check must be an IVerifierURI. In the future we
expect to relax that to be anything that can be adapted to
IVerifierURI (like read-only or read-write dirnode/filenode URIs).
This returns a Deferred that fires with an instance that provides
ICheckerResults.
This returns a Deferred. For dirnodes, this fires with either True or
False (dirnodes are not distributed, so their health is a boolean).
Filenodes and dirnodes (which provide IFilesystemNode) are also
checkable. Instances that represent verifier-caps will be checkable
but not downloadable. Some objects (like LIT files) do not actually
live in the grid, and their checkers indicate a healthy result.
For filenodes, this fires with a tuple of (needed_shares,
total_shares, found_shares, sharemap). The first three are ints. The
basic health of the file is found_shares / needed_shares: if less
than 1.0, the file is unrecoverable.
If verify=False, a relatively lightweight check will be performed: I
will ask all servers if they have a share for me, and I will believe
whatever they say. If there are at least N distinct shares on the
grid, my results will indicate r.is_healthy()==True. This requires a
roundtrip to each server, but does not transfer very much data, so
the network bandwidth is fairly low.
The sharemap has a key for each sharenum. The value is a list of
(binary) nodeids who hold that share. If two shares are kept on the
same nodeid, they will fail as a pair, and overall reliability is
decreased.
If verify=True, a more resource-intensive check will be performed:
every share will be downloaded, and the hashes will be validated on
every bit. I will ignore any shares that failed their hash checks. If
there are at least N distinct valid shares on the grid, my results
will indicate r.is_healthy()==True. This requires N/k times as much
download bandwidth (and server disk IO) as a regular download. If a
storage server is holding a corrupt share, or is experiencing memory
failures during retrieval, or is malicious or buggy, then
verification will detect the problem, but checking will not.
The IChecker instance remembers the results of the check. By default,
these results are stashed in RAM (and are forgotten at shutdown). If
a file named 'checker_results.db' exists in the node's basedir, it is
used as a sqlite database of results, making them persistent across
runs. To start using this feature, just 'touch checker_results.db',
and the node will initialize it properly the next time it is started.
If repair=True, then a non-healthy result will cause an immediate
repair operation, to generate and upload new shares. After repair,
the file will be as healthy as we can make it. Details about what
sort of repair is done will be put in the checker results. My
Deferred will not fire until the repair is complete.
TODO: any problems seen during checking will be reported to the
health-manager.furl, a centralized object which is responsible for
figuring out why files are unhealthy so corrective action can be
taken.
"""
def verify(uri_to_check, repair=False):
"""Accepts an IVerifierURI, and verifies the crypttext of the target.
class ICheckerResults(Interface):
"""I contain the detailed results of a check/verify/repair operation.
This is a more-intensive form of checking. For verification, the
file's crypttext contents are retrieved, and the associated hash
checks are performed. If a storage server is holding a corrupted
share, verification will detect the problem, but checking will not.
This returns a Deferred that fires with True if the crypttext hashes
look good, and will probably raise an exception if anything goes
wrong.
The IFilesystemNode.check()/verify()/repair() methods all return
instances that provide ICheckerResults.
"""
For dirnodes, 'verify' is the same as 'check', so the Deferred will
fire with True or False.
def is_healthy():
"""Return a bool, True if the file is fully healthy, False if it is
damaged in any way."""
Verification currently only uses a minimal subset of peers, so a lot
of share corruption will not be caught by it. We expect to improve
this in the future.
"""
def html_summary():
"""Return a short string, with a single <span> element, that
describes summarized results of the check. This will be displayed on
the web-interface directory page, in a narrow column, showing stored
results for all files at the same time."""
def checker_results_for(uri_to_check):
"""Accepts an IVerifierURI, and returns a list of previously recorded
checker results. This method performs no checking itself: it merely
reports the results of checks that have taken place in the past.
def html():
"""Return a string, with a single <div> element that describes the
detailed results of the check/verify operation. This string will be
displayed on a page all by itself."""
# The old checker results (for only immutable files) were described
# with this:
# For filenodes, this fires with a tuple of (needed_shares,
# total_shares, found_shares, sharemap). The first three are ints. The
# basic health of the file is found_shares / needed_shares: if less
# than 1.0, the file is unrecoverable.
#
# The sharemap has a key for each sharenum. The value is a list of
# (binary) nodeids who hold that share. If two shares are kept on the
# same nodeid, they will fail as a pair, and overall reliability is
# decreased.
Each element of the list is a two-entry tuple: (when, results).
The 'when' values are timestamps (float seconds since epoch), and the
results are as defined in the check() method.
Note: at the moment, this is specified to return synchronously. We
might need to back away from this in the future.
"""
class IClient(Interface):
def upload(uploadable):
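
To make the check/verify contract above concrete, here is a minimal sketch of
a caller exercising both modes (repair=True is documented above but not yet
implemented in this commit; the helper name is hypothetical):

    def audit(node):
        # cheap pass: one round trip per server, believe their claims
        d = node.check(verify=False)
        def _maybe_verify(r):
            if r.is_healthy():
                return r
            # expensive pass: download and hash-check every share
            # (roughly N/k times the bandwidth of a normal download)
            return node.check(verify=True)
        d.addCallback(_maybe_verify)
        return d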

@@ -1,9 +1,11 @@
import struct
from zope.interface import implements
from twisted.internet import defer
from twisted.python import failure
from allmydata import hashtree
from allmydata.util import hashutil
from allmydata.util import hashutil, base32
from allmydata.interfaces import ICheckerResults
from common import MODE_CHECK, CorruptShareError
from servermap import ServerMap, ServermapUpdater
@@ -136,9 +138,9 @@ class MutableChecker:
pass
def _return_results(self, res):
r = {}
r['healthy'] = self.healthy
r['problems'] = self.problems
r = Results(self._storage_index)
r.healthy = self.healthy
r.problems = self.problems
return r
@@ -146,3 +148,28 @@ class MutableChecker:
self.healthy = False
self.problems.append( (peerid, self._storage_index, shnum, what) )
class Results:
implements(ICheckerResults)
def __init__(self, storage_index):
self.storage_index = storage_index
self.storage_index_s = base32.b2a(storage_index)[:6]
def is_healthy(self):
return self.healthy
def html_summary(self):
if self.healthy:
return "<span>healthy</span>"
return "<span>NOT HEALTHY</span>"
def html(self):
s = "<div>\n"
s += "<h1>Checker Results for Mutable SI=%s</h1>\n" % self.storage_index_s
if self.healthy:
s += "<h2>Healthy!</h2>\n"
else:
s += "<h2>Not Healthy!</h2>\n"
s += "</div>\n"
return s

@@ -6,7 +6,7 @@ from zope.interface import implements
from twisted.internet import defer, reactor
from twisted.python import log
from foolscap.eventual import eventually
from allmydata.interfaces import IMutableFileNode, IMutableFileURI
from allmydata.interfaces import IMutableFileNode, IMutableFileURI, ICheckable
from allmydata.util import hashutil
from allmydata.util.assertutil import precondition
from allmydata.uri import WriteableSSKFileURI
@@ -47,7 +47,7 @@ class BackoffAgent:
# use client.create_mutable_file() to make one of these
class MutableFileNode:
implements(IMutableFileNode)
implements(IMutableFileNode, ICheckable)
SIGNATURE_KEY_SIZE = 2048
DEFAULT_ENCODING = (3, 10)

@@ -4,7 +4,7 @@ from zope.interface import implements
from twisted.internet import defer
from twisted.python import failure
from twisted.application import service
from allmydata import uri, dirnode
from allmydata import uri, dirnode, checker
from allmydata.interfaces import IURI, IMutableFileNode, IFileNode, \
FileTooLargeError
from allmydata.encode import NotEnoughSharesError
@@ -104,6 +104,12 @@ class FakeMutableFileNode:
def get_size(self):
return "?" # TODO: see mutable.MutableFileNode.get_size
def check(self, verify=False, repair=False):
r = checker.Results(None)
r.healthy = True
r.problems = []
return defer.succeed(r)
def download_best_version(self):
return defer.succeed(self.all_contents[self.storage_index])
def overwrite(self, new_contents):

@@ -112,7 +112,7 @@ class Dirnode(unittest.TestCase, testutil.ShouldFailMixin, testutil.StallMixin):
d = self.client.create_empty_dirnode()
d.addCallback(lambda dn: dn.check())
def _done(res):
pass
self.failUnless(res.is_healthy())
d.addCallback(_done)
return d

@@ -50,9 +50,11 @@ class Node(unittest.TestCase):
v = fn1.get_verifier()
self.failUnlessEqual(v, None)
self.failUnlessEqual(fn1.check(), None)
target = download.Data()
d = fn1.download(target)
d = fn1.check()
def _check_checker_results(cr):
self.failUnless(cr.is_healthy())
d.addCallback(_check_checker_results)
d.addCallback(lambda res: fn1.download(download.Data()))
def _check(res):
self.failUnlessEqual(res, DATA)
d.addCallback(_check)

@@ -1121,23 +1121,23 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin):
class CheckerMixin:
def check_good(self, r, where):
self.failUnless(r['healthy'], where)
self.failIf(r['problems'], where)
self.failUnless(r.healthy, where)
self.failIf(r.problems, where)
return r
def check_bad(self, r, where):
self.failIf(r['healthy'], where)
self.failIf(r.healthy, where)
return r
def check_expected_failure(self, r, expected_exception, substring, where):
for (peerid, storage_index, shnum, f) in r['problems']:
for (peerid, storage_index, shnum, f) in r.problems:
if f.check(expected_exception):
self.failUnless(substring in str(f),
"%s: substring '%s' not in '%s'" %
(where, substring, str(f)))
return
self.fail("%s: didn't see expected exception %s in problems %s" %
(where, expected_exception, r['problems']))
(where, expected_exception, r.problems))
class Checker(unittest.TestCase, CheckerMixin):

@@ -8,9 +8,10 @@ from twisted.internet import threads # CLI tests use deferToThread
from twisted.internet.error import ConnectionDone, ConnectionLost
from twisted.application import service
import allmydata
from allmydata import client, uri, download, upload, storage, offloaded
from allmydata import client, uri, download, upload, storage, offloaded, \
filenode
from allmydata.introducer.server import IntroducerNode
from allmydata.util import deferredutil, fileutil, idlib, mathutil, testutil
from allmydata.util import fileutil, idlib, mathutil, testutil
from allmydata.util import log, base32
from allmydata.scripts import runner, cli
from allmydata.interfaces import IDirectoryNode, IFileNode, IFileURI
@@ -913,7 +914,6 @@ class SystemTest(testutil.SignalMixin, testutil.PollMixin, testutil.StallMixin,
# P/s2-rw/
# P/test_put/ (empty)
d.addCallback(self._test_checker)
d.addCallback(self._test_verifier)
d.addCallback(self._grab_stats)
return d
test_vdrive.timeout = 1100
@@ -1394,7 +1394,7 @@ class SystemTest(testutil.SignalMixin, testutil.PollMixin, testutil.StallMixin,
d.addCallback(lambda res: self.GET("statistics"))
def _got_stats(res):
self.failUnless("Node Statistics" in res)
self.failUnless(" 'downloader.files_downloaded': 8," in res)
self.failUnless(" 'downloader.files_downloaded': 5," in res, res)
d.addCallback(_got_stats)
d.addCallback(lambda res: self.GET("statistics?t=json"))
def _got_stats_json(res):
@@ -1877,96 +1877,37 @@ class SystemTest(testutil.SignalMixin, testutil.PollMixin, testutil.StallMixin,
return d
def _test_checker(self, res):
d = self._private_node.build_manifest()
d.addCallback(self._test_checker_2)
return d
def _test_checker_2(self, manifest):
checker1 = self.clients[1].getServiceNamed("checker")
self.failUnlessEqual(checker1.checker_results_for(None), [])
self.failUnlessEqual(checker1.checker_results_for(list(manifest)[0]),
[])
dl = []
starting_time = time.time()
for si in manifest:
dl.append(checker1.check(si))
d = deferredutil.DeferredListShouldSucceed(dl)
def _check_checker_results(res):
for i in res:
if type(i) is bool:
self.failUnless(i is True)
else:
(needed, total, found, sharemap) = i
self.failUnlessEqual(needed, 3)
self.failUnlessEqual(total, 10)
self.failUnlessEqual(found, total)
self.failUnlessEqual(len(sharemap.keys()), 10)
peers = set()
for shpeers in sharemap.values():
peers.update(shpeers)
self.failUnlessEqual(len(peers), self.numclients)
d.addCallback(_check_checker_results)
def _check_stored_results(res):
finish_time = time.time()
all_results = []
for si in manifest:
results = checker1.checker_results_for(si)
if not results:
# TODO: implement checker for mutable files and implement tests of that checker
continue
self.failUnlessEqual(len(results), 1)
when, those_results = results[0]
self.failUnless(isinstance(when, (int, float)))
self.failUnless(starting_time <= when <= finish_time)
all_results.append(those_results)
_check_checker_results(all_results)
d.addCallback(_check_stored_results)
d.addCallback(self._test_checker_3)
return d
def _test_checker_3(self, res):
# check one file, through FileNode.check()
d = self._private_node.get_child_at_path(u"personal/sekrit data")
d.addCallback(lambda n: n.check())
def _checked(results):
# 'sekrit data' is small, and fits in a LiteralFileNode, so
# checking it is trivial and always returns True
self.failUnlessEqual(results, True)
d.addCallback(_checked)
c0 = self.clients[1]
n = c0.create_node_from_uri(self._root_directory_uri)
d.addCallback(lambda res: n.get_child_at_path(u"subdir1/mydata567"))
d.addCallback(lambda n: n.check())
def _checked2(results):
# mydata567 is large and lives in a CHK
(needed, total, found, sharemap) = results
self.failUnlessEqual(needed, 3)
self.failUnlessEqual(total, 10)
self.failUnlessEqual(found, 10)
self.failUnlessEqual(len(sharemap), 10)
for shnum in range(10):
self.failUnlessEqual(len(sharemap[shnum]), 1)
d.addCallback(_checked2)
return d
def _test_verifier(self, res):
checker1 = self.clients[1].getServiceNamed("checker")
d = self._private_node.build_manifest()
def _check_all(manifest):
dl = []
for si in manifest:
dl.append(checker1.verify(si))
return deferredutil.DeferredListShouldSucceed(dl)
d.addCallback(_check_all)
def _done(res):
for i in res:
self.failUnless(i is True)
d.addCallback(_done)
d.addCallback(lambda res: checker1.verify(None))
d.addCallback(self.failUnlessEqual, True)
ut = upload.Data("too big to be literal" * 200, convergence=None)
d = self._personal_node.add_file(u"big file", ut)
d.addCallback(lambda res: self._personal_node.check())
def _check_dirnode_results(r):
self.failUnless(r.is_healthy())
d.addCallback(_check_dirnode_results)
d.addCallback(lambda res: self._personal_node.check(verify=True))
d.addCallback(_check_dirnode_results)
d.addCallback(lambda res: self._personal_node.get(u"big file"))
def _got_chk_filenode(n):
self.failUnless(isinstance(n, filenode.FileNode))
d = n.check()
def _check_filenode_results(r):
self.failUnless(r.is_healthy())
d.addCallback(_check_filenode_results)
d.addCallback(lambda res: n.check(verify=True))
d.addCallback(_check_filenode_results)
return d
d.addCallback(_got_chk_filenode)
d.addCallback(lambda res: self._personal_node.get(u"sekrit data"))
def _got_lit_filenode(n):
self.failUnless(isinstance(n, filenode.LiteralFileNode))
d = n.check()
def _check_filenode_results(r):
self.failUnless(r.is_healthy())
d.addCallback(_check_filenode_results)
d.addCallback(lambda res: n.check(verify=True))
d.addCallback(_check_filenode_results)
return d
d.addCallback(_got_lit_filenode)
return d

@@ -535,34 +535,7 @@ class DirectoryAsHTML(rend.Page):
ctx.fillSlots("data", childdata)
try:
checker = IClient(ctx).getServiceNamed("checker")
except KeyError:
checker = None
if checker:
d = defer.maybeDeferred(checker.checker_results_for,
target.get_verifier())
def _got(checker_results):
recent_results = reversed(checker_results[-5:])
if IFileNode.providedBy(target):
results = ("[" +
", ".join(["%d/%d" % (found, needed)
for (when,
(needed, total, found, sharemap))
in recent_results]) +
"]")
elif IDirectoryNode.providedBy(target):
results = ("[" +
"".join([{True:"+",False:"-"}[res]
for (when, res) in recent_results]) +
"]")
else:
results = "%d results" % len(checker_results)
return results
d.addCallback(_got)
results = d
else:
results = "--"
results = "--"
# TODO: include a link to see more results, including timestamps
# TODO: use a sparkline
ctx.fillSlots("checker_results", results)