test_web: improve checker-results coverage with a no-network-based test, enhance the no-network harness to support it, and fix some bugs in web/check_results.py that the new test exposed

Brian Warner 2009-02-16 22:12:42 -07:00
parent 059f24adca
commit 522641bb32
3 changed files with 312 additions and 7 deletions
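
In brief, the new test drives the webapi checker endpoints over the in-process no-network grid. For orientation, these are the URL shapes it exercises (<filecap> stands in for a file's cap; all parameters below appear in the diff):

    # checker endpoints exercised by the new Grid tests in test_web.py:
    #   uri/<filecap>?t=check
    #   uri/<filecap>?t=check&return_to=somewhere
    #   uri/<filecap>?t=check&output=json
    #   uri/<filecap>?t=check&verify=true
    #   uri/<filecap>?t=check&repair=true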

src/allmydata/test/no_network.py

@@ -19,8 +19,9 @@ from twisted.application import service
from foolscap import Referenceable
from foolscap.eventual import fireEventually
from base64 import b32encode
from allmydata import uri as tahoe_uri
from allmydata.client import Client
from allmydata.storage import StorageServer
from allmydata.storage import StorageServer, storage_index_to_dir
from allmydata.util import fileutil, idlib, hashutil, rrefutil
from allmydata.introducer.client import RemoteServiceConnector
@@ -131,6 +132,8 @@ class NoNetworkClient(Client):
    def get_permuted_peers(self, service_name, key):
        return sorted(self._servers, key=lambda x: sha.new(key+x[0]).digest())
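    # (presumably why this stub was added: the checker-results pages ask
    # the client for server nicknames, and the no-network grid runs no
    # introducer to supply them)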
    def get_nickname_for_peerid(self, peerid):
        return None

class NoNetworkGrid(service.MultiService):
@@ -202,6 +205,10 @@ class GridTestMixin:
        self.g = NoNetworkGrid(self.basedir,
                               client_config_hooks=client_config_hooks)
        self.g.setServiceParent(self.s)
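        # ask each client's "webish" listener for its kernel-assigned port
        # number, so tests can build URLs that reach the right server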
        self.client_webports = [c.getServiceNamed("webish").listener._port.getHost().port
                                for c in self.g.clients]
        self.client_baseurls = ["http://localhost:%d/" % p
                                for p in self.client_webports]

    def get_clientdir(self, i=0):
        return self.g.clients[i].basedir
@@ -213,3 +220,21 @@ class GridTestMixin:
        for i in sorted(self.g.servers_by_number.keys()):
            ss = self.g.servers_by_number[i]
            yield (i, ss, ss.storedir)

    def find_shares(self, uri):
        si = tahoe_uri.from_string(uri).get_storage_index()
        prefixdir = storage_index_to_dir(si)
        shares = []
        for i,ss in self.g.servers_by_number.items():
            serverid = ss.my_nodeid
            basedir = os.path.join(ss.storedir, "shares", prefixdir)
            if not os.path.exists(basedir):
                continue
            for f in os.listdir(basedir):
                try:
                    shnum = int(f)
                    shares.append((shnum, serverid, os.path.join(basedir, f)))
                except ValueError:
                    pass
        return sorted(shares)
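
Each tuple returned by find_shares() is (shnum, serverid, sharefile), sorted by share number; the new tests below consume it like this (a minimal excerpt from the test code):

    # make a file "sick" by deleting one of its shares
    sick_shares = self.find_shares(self.uris["sick"])
    (shnum, serverid, sharefile) = sick_shares[0]
    os.unlink(sharefile)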

src/allmydata/test/test_web.py

@@ -1,5 +1,6 @@
import os.path, re, urllib
import simplejson
from StringIO import StringIO
from twisted.application import service
from twisted.trial import unittest
from twisted.internet import defer, reactor
@@ -8,14 +9,16 @@ from twisted.python import failure, log
from allmydata import interfaces, uri, webish
from allmydata.immutable import upload, download
from allmydata.web import status, common
from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
from allmydata.util import fileutil, base32
from allmydata.util.assertutil import precondition
from allmydata.test.common import FakeDirectoryNode, FakeCHKFileNode, \
     FakeMutableFileNode, create_chk_filenode
     FakeMutableFileNode, create_chk_filenode, WebErrorMixin
from allmydata.interfaces import IURI, INewDirectoryURI, \
     IReadonlyNewDirectoryURI, IFileURI, IMutableFileURI, IMutableFileNode
from allmydata.mutable import servermap, publish, retrieve
import common_util as testutil
from allmydata.test.no_network import GridTestMixin

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them
@@ -2519,3 +2522,278 @@ class Util(unittest.TestCase):
        self.failUnlessEqual(common.abbreviate_size(1230), "1.2kB")
        self.failUnlessEqual(common.abbreviate_size(123), "123B")

class Grid(GridTestMixin, WebErrorMixin, unittest.TestCase):
    def GET(self, urlpath, followRedirect=False, return_response=False,
            method="GET", **kwargs):
        # if return_response=True, this fires with (data, statuscode,
        # respheaders) instead of just data.
        assert not isinstance(urlpath, unicode)
        url = self.client_baseurls[0] + urlpath
        factory = HTTPClientGETFactory(url, method=method,
                                       followRedirect=followRedirect, **kwargs)
        reactor.connectTCP("localhost", self.client_webports[0], factory)
        d = factory.deferred
        def _got_data(data):
            return (data, factory.status, factory.response_headers)
        if return_response:
            d.addCallback(_got_data)
        return factory.deferred
    def CHECK(self, ign, which, **kwargs):
        fileurl = self.fileurls[which]
        url = fileurl + "?" + "&".join(["%s=%s" % (k,v)
                                        for (k,v) in kwargs.items()])
        return self.GET(url, method="POST")
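
CHECK is designed to sit in a Deferred chain: the first argument swallows the previous callback's result, and the keyword arguments become query parameters. A typical use from the tests below:

    # POSTs to uri/<filecap>?t=check&output=json (parameter order may vary,
    # since the query string is built from a dict)
    d.addCallback(self.CHECK, "good", t="check", output="json")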
    def test_filecheck(self):
        self.basedir = "web/Grid/filecheck"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = "data" * 100
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data("literal", convergence="")))
        d.addCallback(_stash_uri, "small")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)
        def _clobber_shares(ignored):
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_shares(self.uris["corrupt"])
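            # corrupt_share() (the guts of "tahoe debug corrupt-share")
            # flips a single bit in the share file; a plain t=check does
            # not read share contents, which is why the "corrupt" checks
            # below add verify=true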
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
            corrupt_share(cso)
        d.addCallback(_clobber_shares)
d.addCallback(self.CHECK, "good", t="check")
def _got_html_good(res):
self.failUnless("Healthy" in res, res)
self.failIf("Not Healthy" in res, res)
d.addCallback(_got_html_good)
d.addCallback(self.CHECK, "good", t="check", return_to="somewhere")
def _got_html_good_return_to(res):
self.failUnless("Healthy" in res, res)
self.failIf("Not Healthy" in res, res)
self.failUnless('<a href="somewhere">Return to parent directory'
in res, res)
d.addCallback(_got_html_good_return_to)
d.addCallback(self.CHECK, "good", t="check", output="json")
def _got_json_good(res):
r = simplejson.loads(res)
self.failUnlessEqual(r["summary"], "Healthy")
self.failUnless(r["results"]["healthy"])
self.failIf(r["results"]["needs-rebalancing"])
self.failUnless(r["results"]["recoverable"])
d.addCallback(_got_json_good)
d.addCallback(self.CHECK, "small", t="check")
def _got_html_small(res):
self.failUnless("Literal files are always healthy" in res, res)
self.failIf("Not Healthy" in res, res)
d.addCallback(_got_html_small)
d.addCallback(self.CHECK, "small", t="check", return_to="somewhere")
def _got_html_small_return_to(res):
self.failUnless("Literal files are always healthy" in res, res)
self.failIf("Not Healthy" in res, res)
self.failUnless('<a href="somewhere">Return to parent directory'
in res, res)
d.addCallback(_got_html_small_return_to)
d.addCallback(self.CHECK, "small", t="check", output="json")
def _got_json_small(res):
r = simplejson.loads(res)
self.failUnlessEqual(r["storage-index"], "")
self.failUnless(r["results"]["healthy"])
d.addCallback(_got_json_small)
d.addCallback(self.CHECK, "sick", t="check")
def _got_html_sick(res):
self.failUnless("Not Healthy" in res, res)
d.addCallback(_got_html_sick)
d.addCallback(self.CHECK, "sick", t="check", output="json")
def _got_json_sick(res):
r = simplejson.loads(res)
self.failUnlessEqual(r["summary"],
"Not Healthy: 9 shares (enc 3-of-10)")
self.failIf(r["results"]["healthy"])
self.failIf(r["results"]["needs-rebalancing"])
self.failUnless(r["results"]["recoverable"])
d.addCallback(_got_json_sick)
d.addCallback(self.CHECK, "dead", t="check")
def _got_html_dead(res):
self.failUnless("Not Healthy" in res, res)
d.addCallback(_got_html_dead)
d.addCallback(self.CHECK, "dead", t="check", output="json")
def _got_json_dead(res):
r = simplejson.loads(res)
self.failUnlessEqual(r["summary"],
"Not Healthy: 1 shares (enc 3-of-10)")
self.failIf(r["results"]["healthy"])
self.failIf(r["results"]["needs-rebalancing"])
self.failIf(r["results"]["recoverable"])
d.addCallback(_got_json_dead)
d.addCallback(self.CHECK, "corrupt", t="check", verify="true")
def _got_html_corrupt(res):
self.failUnless("Not Healthy! : Unhealthy" in res, res)
d.addCallback(_got_html_corrupt)
d.addCallback(self.CHECK, "corrupt",
t="check", verify="true", output="json")
def _got_json_corrupt(res):
r = simplejson.loads(res)
self.failUnless("Unhealthy: 9 shares (enc 3-of-10)" in r["summary"],
r["summary"])
self.failIf(r["results"]["healthy"])
self.failUnless(r["results"]["recoverable"])
self.failUnlessEqual(r["results"]["count-shares-good"], 9)
self.failUnlessEqual(r["results"]["count-corrupt-shares"], 1)
d.addCallback(_got_json_corrupt)
d.addErrback(self.explain_web_error)
return d
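
For reference, the check-results JSON exercised above has roughly this shape (field names are taken from the assertions; the values are illustrative, matching the "sick" case, and the full field set is not shown in this diff):

    {
     "summary": "Not Healthy: 9 shares (enc 3-of-10)",
     "storage-index": "<base32 SI; empty for literal files>",
     "results": {
      "healthy": false,
      "recoverable": true,
      "needs-rebalancing": false,
      "count-shares-good": 9
     }
    }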
    def test_repair_html(self):
        self.basedir = "web/Grid/repair_html"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = "data" * 100
        d = c0.upload(upload.Data(DATA, convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "good")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"1", convergence="")))
        d.addCallback(_stash_uri, "sick")
        d.addCallback(lambda ign:
                      c0.upload(upload.Data(DATA+"2", convergence="")))
        d.addCallback(_stash_uri, "dead")
        def _stash_mutable_uri(n, which):
            self.uris[which] = n.get_uri()
            assert isinstance(self.uris[which], str)
        d.addCallback(lambda ign: c0.create_mutable_file(DATA+"3"))
        d.addCallback(_stash_mutable_uri, "corrupt")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            good_shares = self.find_shares(self.uris["good"])
            self.failUnlessEqual(len(good_shares), 10)
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
            dead_shares = self.find_shares(self.uris["dead"])
            for i in range(1, 10):
                os.unlink(dead_shares[i][2])
            c_shares = self.find_shares(self.uris["corrupt"])
            cso = CorruptShareOptions()
            cso.stdout = StringIO()
            cso.parseOptions([c_shares[0][2]])
            corrupt_share(cso)
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "good", t="check", repair="true")
        def _got_html_good(res):
            self.failUnless("Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("No repair necessary" in res, res)
        d.addCallback(_got_html_good)

        d.addCallback(self.CHECK, "sick", t="check", repair="true")
        def _got_html_sick(res):
            self.failUnless("Healthy : healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_sick)

        # repair of a dead file will fail, of course, but it isn't yet
        # clear how this should be reported. Right now it shows up as
        # a "410 Gone".
        #
        #d.addCallback(self.CHECK, "dead", t="check", repair="true")
        #def _got_html_dead(res):
        #    print res
        #    self.failUnless("Healthy : healthy" in res, res)
        #    self.failIf("Not Healthy" in res, res)
        #    self.failUnless("No repair necessary" in res, res)
        #d.addCallback(_got_html_dead)

        d.addCallback(self.CHECK, "corrupt",
                      t="check", verify="true", repair="true")
        def _got_html_corrupt(res):
            self.failUnless("Healthy : Healthy" in res, res)
            self.failIf("Not Healthy" in res, res)
            self.failUnless("Repair successful" in res, res)
        d.addCallback(_got_html_corrupt)

        d.addErrback(self.explain_web_error)
        return d
    def test_repair_json(self):
        self.basedir = "web/Grid/repair_json"
        self.set_up_grid()
        c0 = self.g.clients[0]
        self.uris = {}
        DATA = "data" * 100
        d = c0.upload(upload.Data(DATA+"1", convergence=""))
        def _stash_uri(ur, which):
            self.uris[which] = ur.uri
        d.addCallback(_stash_uri, "sick")

        def _compute_fileurls(ignored):
            self.fileurls = {}
            for which in self.uris:
                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
        d.addCallback(_compute_fileurls)

        def _clobber_shares(ignored):
            sick_shares = self.find_shares(self.uris["sick"])
            os.unlink(sick_shares[0][2])
        d.addCallback(_clobber_shares)

        d.addCallback(self.CHECK, "sick",
                      t="check", repair="true", output="json")
        def _got_json_sick(res):
            r = simplejson.loads(res)
            self.failUnlessEqual(r["repair-attempted"], True)
            self.failUnlessEqual(r["repair-successful"], True)
            self.failUnlessEqual(r["pre-repair-results"]["summary"],
                                 "Not Healthy: 9 shares (enc 3-of-10)")
            self.failIf(r["pre-repair-results"]["results"]["healthy"])
            self.failUnlessEqual(r["post-repair-results"]["summary"], "healthy")
            self.failUnless(r["post-repair-results"]["results"]["healthy"])
        d.addCallback(_got_json_sick)

        d.addErrback(self.explain_web_error)
        return d

src/allmydata/web/check_results.py

@@ -178,6 +178,13 @@ class LiteralCheckResults(rend.Page, ResultsBase):
                }
        return simplejson.dumps(data, indent=1) + "\n"

    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to parent directory"]]
        return ""
class CheckerBase:

    def renderHTTP(self, ctx):
@@ -230,11 +237,6 @@ class CheckResults(CheckerBase, rend.Page, ResultsBase):
            ]]
        return ctx.tag[repair]

    def render_rebalance(self, ctx, data):
        if self.r.needs_rebalancing():
            return ctx.tag["(needs rebalancing)"]
        return ctx.tag["(does not need rebalancing)"]

    def render_results(self, ctx, data):
        cr = self._render_results(ctx, self.r)
        return ctx.tag[cr]