import time
import simplejson
from nevow import rend, inevow, tags as T
from twisted.web import http, html
from allmydata.web.common import getxmlfile, get_arg, get_root, \
     IClient, WebError
from allmydata.web.operations import ReloadMixin
from allmydata.interfaces import ICheckAndRepairResults, ICheckerResults
from allmydata.util import base32, idlib

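# Web renderers for Tahoe checker results: HTML pages and JSON views for
# single-object check, check-and-repair, and recursive deep-check operations.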
class ResultsBase:
    def _render_results(self, ctx, cr):
        assert ICheckerResults(cr)
        c = IClient(ctx)
        data = cr.get_data()
        r = []
        def add(name, value):
            r.append(T.li[name + ": ", value])

        add("Report", T.pre["\n".join(self._html(cr.get_report()))])
        add("Share Counts",
            "need %d-of-%d, have %d" % (data["count-shares-needed"],
                                        data["count-shares-expected"],
                                        data["count-shares-good"]))
        add("Hosts with good shares", data["count-good-share-hosts"])

        if data["list-corrupt-shares"]:
            badsharemap = []
            for (serverid, si, shnum) in data["list-corrupt-shares"]:
                nickname = c.get_nickname_for_peerid(serverid)
                badsharemap.append(T.tr[T.td["sh#%d" % shnum],
                                        T.td[T.tt[base32.b2a(serverid)],
                                             " (", nickname, ")"],
                                        ])
            add("Corrupt shares", T.table(border="1")[badsharemap])
        else:
            add("Corrupt shares", "none")

        add("Wrong Shares", data["count-wrong-shares"])

        sharemap = []
        servers = {}
        for shareid in sorted(data["sharemap"].keys()):
            serverids = data["sharemap"][shareid]
            for i,serverid in enumerate(serverids):
                servers[serverid] = servers.get(serverid,0) + 1
                shareid_s = ""
                if i == 0:
                    shareid_s = shareid
                nickname = c.get_nickname_for_peerid(serverid)
                sharemap.append(T.tr[T.td[shareid_s],
                                     T.td[T.tt[base32.b2a(serverid)],
                                          " (", nickname, ")"],
                                     ])
        add("Good Shares", T.table(border="1")[sharemap])

        add("Recoverable Versions", data["count-recoverable-versions"])
        add("Unrecoverable Versions", data["count-unrecoverable-versions"])

        servermap = []
        for serverid in sorted(servers.keys()):
            nickname = c.get_nickname_for_peerid(serverid)
            servermap.append(T.tr[T.td[T.tt[base32.b2a(serverid)],
                                       " (", nickname, ")"],
                                  T.td["*" * servers[serverid]] ])
        add("Share Balancing", T.table(border="1")[servermap])

        return T.ul[r]

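    # JSON helpers: these mirror the HTML renderers above, producing plain
    # dictionaries that the per-page json() methods serialize with simplejson
    # when the request asked for ?output=json.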
    def _json_check_and_repair_results(self, r):
        data = {}
        data["storage-index"] = r.get_storage_index_string()
        data["repair-attempted"] = r.get_repair_attempted()
        data["repair-successful"] = r.get_repair_successful()
        pre = r.get_pre_repair_results()
        data["pre-repair-results"] = self._json_check_results(pre)
        post = r.get_post_repair_results()
        data["post-repair-results"] = self._json_check_results(post)
        return data

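    # One object's check results as a JSON-friendly dict: the storage index,
    # a summary string, and the counters from _json_check_counts().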
    def _json_check_results(self, r):
        data = {}
        data["storage-index"] = r.get_storage_index_string()
        data["summary"] = r.get_summary()
        data["results"] = self._json_check_counts(r.get_data())
        data["results"]["needs-rebalancing"] = r.needs_rebalancing()
        data["results"]["healthy"] = r.is_healthy()
        data["results"]["recoverable"] = r.is_recoverable()
        return data

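    # Copy the counters out of the checker's data dict, converting binary
    # serverids and storage indexes to their printable base32 forms so the
    # result is JSON-serializable.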
    def _json_check_counts(self, d):
        r = {}
        r["count-shares-good"] = d["count-shares-good"]
        r["count-shares-needed"] = d["count-shares-needed"]
        r["count-shares-expected"] = d["count-shares-expected"]
        r["count-good-share-hosts"] = d["count-good-share-hosts"]
        r["count-corrupt-shares"] = d["count-corrupt-shares"]
        r["list-corrupt-shares"] = [ (idlib.nodeid_b2a(serverid),
                                      base32.b2a(si), shnum)
                                     for (serverid, si, shnum)
                                     in d["list-corrupt-shares"] ]
        r["servers-responding"] = [idlib.nodeid_b2a(serverid)
                                   for serverid in d["servers-responding"]]
        sharemap = {}
        for (shareid, serverids) in d["sharemap"].items():
            sharemap[shareid] = [idlib.nodeid_b2a(serverid)
                                 for serverid in serverids]
        r["sharemap"] = sharemap

        r["count-wrong-shares"] = d["count-wrong-shares"]
        r["count-recoverable-versions"] = d["count-recoverable-versions"]
        r["count-unrecoverable-versions"] = d["count-unrecoverable-versions"]

        return r

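    # Escape a string (or each string in a list) for safe embedding in HTML.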
    def _html(self, s):
        if isinstance(s, (str, unicode)):
            return html.escape(s)
        assert isinstance(s, (list, tuple))
        return [html.escape(w) for w in s]

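    # True if the request asked for machine-readable output via ?output=json.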
    def want_json(self, ctx):
        output = get_arg(inevow.IRequest(ctx), "output", "").lower()
        if output == "json":
            return True
        return False

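    # Build a link to the per-object detail page for one storage index,
    # nested under the current operation handle (req.prepath[-1]). Any
    # requested ?output= format is carried along on the link.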
    def _render_si_link(self, ctx, storage_index):
        si_s = base32.b2a(storage_index)
        root = get_root(ctx)
        req = inevow.IRequest(ctx)
        ophandle = req.prepath[-1]
        target = "%s/operations/%s/%s" % (root, ophandle, si_s)
        output = get_arg(ctx, "output")
        if output:
            target = target + "?output=%s" % output
        return T.a(href=target)[si_s]

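
# Results page for a LIT (literal) cap: the data lives entirely in the URI,
# there are no shares to check, so the object is reported as trivially healthy.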
class LiteralCheckerResults(rend.Page, ResultsBase):
    docFactory = getxmlfile("literal-checker-results.xhtml")

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = {"storage-index": "",
                "results": {"healthy": True},
                }
        return simplejson.dumps(data, indent=1) + "\n"

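
# Shared plumbing for the single-object results pages: JSON dispatch in
# renderHTTP(), plus the storage-index and "return to parent" renderers.
# Subclasses set self.r and provide json().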
class CheckerBase:

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)

    def render_storage_index(self, ctx, data):
        return self.r.get_storage_index_string()

    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to parent directory"]]
        return ""

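
# HTML/JSON view of a single ICheckerResults: a one-line health summary, a
# repair form when the object is unhealthy, and the detailed share tables.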
class CheckerResults(CheckerBase, rend.Page, ResultsBase):
    docFactory = getxmlfile("checker-results.xhtml")

    def __init__(self, results):
        self.r = ICheckerResults(results)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = self._json_check_results(self.r)
        return simplejson.dumps(data, indent=1) + "\n"

    def render_summary(self, ctx, data):
        results = []
        if self.r.is_healthy():
            results.append("Healthy")
        elif self.r.is_recoverable():
            results.append("Not Healthy!")
        else:
            results.append("Not Recoverable!")
        results.append(" : ")
        results.append(self._html(self.r.get_summary()))
        return ctx.tag[results]

    def render_repair(self, ctx, data):
        if self.r.is_healthy():
            return ""
        repair = T.form(action=".", method="post",
                        enctype="multipart/form-data")[
            T.fieldset[
                T.input(type="hidden", name="t", value="check"),
                T.input(type="hidden", name="repair", value="true"),
                T.input(type="submit", value="Repair"),
            ]]
        return ctx.tag[repair]

    def render_rebalance(self, ctx, data):
        if self.r.needs_rebalancing():
            return ctx.tag["(needs rebalancing)"]
        return ctx.tag["(does not need rebalancing)"]

    def render_results(self, ctx, data):
        cr = self._render_results(ctx, self.r)
        return ctx.tag[cr]

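
# HTML/JSON view of a single ICheckAndRepairResults: post-repair health
# summary, whether a repair was attempted and succeeded, and both the
# pre-repair and post-repair detail tables.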
class CheckAndRepairResults(CheckerBase, rend.Page, ResultsBase):
    docFactory = getxmlfile("check-and-repair-results.xhtml")

    def __init__(self, results):
        self.r = ICheckAndRepairResults(results)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = self._json_check_and_repair_results(self.r)
        return simplejson.dumps(data, indent=1) + "\n"

    def render_summary(self, ctx, data):
        cr = self.r.get_post_repair_results()
        results = []
        if cr.is_healthy():
            results.append("Healthy")
        elif cr.is_recoverable():
            results.append("Not Healthy!")
        else:
            results.append("Not Recoverable!")
        results.append(" : ")
        results.append(self._html(cr.get_summary()))
        return ctx.tag[results]

    def render_repair_results(self, ctx, data):
        if self.r.get_repair_attempted():
            if self.r.get_repair_successful():
                return ctx.tag["Repair successful"]
            else:
                return ctx.tag["Repair unsuccessful"]
        return ctx.tag["No repair necessary"]

    def render_post_repair_results(self, ctx, data):
        cr = self._render_results(ctx, self.r.get_post_repair_results())
        return ctx.tag[cr]

    def render_maybe_pre_repair_results(self, ctx, data):
        if self.r.get_repair_attempted():
            cr = self._render_results(ctx, self.r.get_pre_repair_results())
            return ctx.tag[T.div["Pre-Repair Checker Results:"], cr]
        return ""

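
# Results of a recursive deep-check operation, served under an operation
# handle; child resources give per-object detail pages.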
class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin):
    docFactory = getxmlfile("deep-check-results.xhtml")

    def __init__(self, monitor):
        self.monitor = monitor

    def childFactory(self, ctx, name):
        if not name:
            return self
        # /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
        # about a specific file or directory that was checked
        si = base32.a2b(name)
        r = self.monitor.get_status()
        try:
            return CheckerResults(r.get_results_for_storage_index(si))
        except KeyError:
            raise WebError("No detailed results for SI %s" % html.escape(name),
                           http.NOT_FOUND)

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)

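    # Aggregate JSON for the whole deep-check. A response looks roughly like:
    #   {"finished": true,
    #    "root-storage-index": "<base32 SI>",
    #    "count-objects-checked": 3, ...,
    #    "list-corrupt-shares": [["<serverid>", "<si>", 0], ...],
    #    "list-unhealthy-files": [["path/to/file", {...check results...}], ...],
    #    "stats": {...}}
    # (illustrative only; the exact keys are the ones filled in below).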
    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = {}
        data["finished"] = self.monitor.is_finished()
        res = self.monitor.get_status()
        data["root-storage-index"] = res.get_root_storage_index_string()
        c = res.get_counters()
        data["count-objects-checked"] = c["count-objects-checked"]
        data["count-objects-healthy"] = c["count-objects-healthy"]
        data["count-objects-unhealthy"] = c["count-objects-unhealthy"]
        data["count-corrupt-shares"] = c["count-corrupt-shares"]
        data["list-corrupt-shares"] = [ (idlib.nodeid_b2a(serverid),
                                         base32.b2a(storage_index),
                                         shnum)
                                        for (serverid, storage_index, shnum)
                                        in res.get_corrupt_shares() ]
        data["list-unhealthy-files"] = [ (path_t, self._json_check_results(r))
                                         for (path_t, r)
                                         in res.get_all_results().items()
                                         if not r.is_healthy() ]
        data["stats"] = res.get_stats()
        return simplejson.dumps(data, indent=1) + "\n"

    def render_root_storage_index(self, ctx, data):
        return self.monitor.get_status().get_root_storage_index_string()

    def data_objects_checked(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-checked"]
    def data_objects_healthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-healthy"]
    def data_objects_unhealthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unhealthy"]
    def data_objects_unrecoverable(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unrecoverable"]

    def data_count_corrupt_shares(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-corrupt-shares"]

    def render_problems_p(self, ctx, data):
        c = self.monitor.get_status().get_counters()
        if c["count-objects-unhealthy"]:
            return ctx.tag
        return ""

    def data_problems(self, ctx, data):
        all_objects = self.monitor.get_status().get_all_results()
        for path in sorted(all_objects.keys()):
            cr = all_objects[path]
            assert ICheckerResults.providedBy(cr)
            if not cr.is_healthy():
                yield path, cr

    def render_problem(self, ctx, data):
        path, cr = data
        summary_text = ""
        summary = cr.get_summary()
        if summary:
            summary_text = ": " + summary
        summary_text += " [SI: %s]" % cr.get_storage_index_string()
        return ctx.tag["/".join(self._html(path)), self._html(summary_text)]


    def render_servers_with_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares"]:
            return ctx.tag
        return ""

    def data_servers_with_corrupt_shares(self, ctx, data):
        servers = [serverid
                   for (serverid, storage_index, sharenum)
                   in self.monitor.get_status().get_corrupt_shares()]
        servers.sort()
        return servers

    def render_server_problem(self, ctx, data):
        serverid = data
        data = [idlib.shortnodeid_b2a(serverid)]
        c = IClient(ctx)
        nickname = c.get_nickname_for_peerid(serverid)
        if nickname:
            data.append(" (%s)" % self._html(nickname))
        return ctx.tag[data]


    def render_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares"]:
            return ctx.tag
        return ""
    def data_corrupt_shares(self, ctx, data):
        return self.monitor.get_status().get_corrupt_shares()
    def render_share_problem(self, ctx, data):
        serverid, storage_index, sharenum = data
        nickname = IClient(ctx).get_nickname_for_peerid(serverid)
        ctx.fillSlots("serverid", idlib.shortnodeid_b2a(serverid))
        if nickname:
            ctx.fillSlots("nickname", self._html(nickname))
        ctx.fillSlots("si", self._render_si_link(ctx, storage_index))
        ctx.fillSlots("shnum", str(sharenum))
        return ctx.tag

    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to parent directory"]]
        return ""

    def data_all_objects(self, ctx, data):
        r = self.monitor.get_status().get_all_results()
        for path in sorted(r.keys()):
            yield (path, r[path])

    def render_object(self, ctx, data):
        path, r = data
        if path:
            pathstring = "/".join(self._html(path))
        else:
            pathstring = "<root>"
        ctx.fillSlots("path", pathstring)
        ctx.fillSlots("healthy", str(r.is_healthy()))
        ctx.fillSlots("recoverable", str(r.is_recoverable()))
        storage_index = r.get_storage_index()
        ctx.fillSlots("storage_index", self._render_si_link(ctx, storage_index))
        ctx.fillSlots("summary", self._html(r.get_summary()))
        return ctx.tag

    def render_runtime(self, ctx, data):
        req = inevow.IRequest(ctx)
        runtime = time.time() - req.processing_started_timestamp
        return ctx.tag["runtime: %s seconds" % runtime]

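
# Results of a recursive deep-check-and-repair operation: pre- and
# post-repair counters, repair attempt/success counts, and per-object rows.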
class DeepCheckAndRepairResults(rend.Page, ResultsBase, ReloadMixin):
    docFactory = getxmlfile("deep-check-and-repair-results.xhtml")

    def __init__(self, monitor):
        #assert IDeepCheckAndRepairResults(results)
        #self.r = results
        self.monitor = monitor

    def renderHTTP(self, ctx):
        if self.want_json(ctx):
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)

    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        res = self.monitor.get_status()
        data = {}
        data["finished"] = self.monitor.is_finished()
        data["root-storage-index"] = res.get_root_storage_index_string()
        c = res.get_counters()
        data["count-objects-checked"] = c["count-objects-checked"]

        data["count-objects-healthy-pre-repair"] = c["count-objects-healthy-pre-repair"]
        data["count-objects-unhealthy-pre-repair"] = c["count-objects-unhealthy-pre-repair"]
        data["count-objects-healthy-post-repair"] = c["count-objects-healthy-post-repair"]
        data["count-objects-unhealthy-post-repair"] = c["count-objects-unhealthy-post-repair"]

        data["count-repairs-attempted"] = c["count-repairs-attempted"]
        data["count-repairs-successful"] = c["count-repairs-successful"]
        data["count-repairs-unsuccessful"] = c["count-repairs-unsuccessful"]

        data["count-corrupt-shares-pre-repair"] = c["count-corrupt-shares-pre-repair"]
data["count-corrupt-shares-post-repair"] = c["count-corrupt-shares-pre-repair"]
|
|
|
|
|
        data["list-corrupt-shares"] = [ (idlib.nodeid_b2a(serverid),
                                         base32.b2a(storage_index),
                                         shnum)
                                        for (serverid, storage_index, shnum)
                                        in res.get_corrupt_shares() ]

        remaining_corrupt = [ (idlib.nodeid_b2a(serverid),
                               base32.b2a(storage_index),
                               shnum)
                              for (serverid, storage_index, shnum)
                              in res.get_remaining_corrupt_shares() ]
        data["list-remaining-corrupt-shares"] = remaining_corrupt

        unhealthy = [ (path_t,
                       self._json_check_results(crr.get_pre_repair_results()))
                      for (path_t, crr)
                      in res.get_all_results().items()
                      if not crr.get_pre_repair_results().is_healthy() ]
        data["list-unhealthy-files"] = unhealthy
        data["stats"] = res.get_stats()
        return simplejson.dumps(data, indent=1) + "\n"

    def render_root_storage_index(self, ctx, data):
        return self.monitor.get_status().get_root_storage_index_string()

    def data_objects_checked(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-checked"]

    def data_objects_healthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-healthy-pre-repair"]
    def data_objects_unhealthy(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unhealthy-pre-repair"]
    def data_corrupt_shares(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-corrupt-shares-pre-repair"]

    def data_repairs_attempted(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-repairs-attempted"]
    def data_repairs_successful(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-repairs-successful"]
    def data_repairs_unsuccessful(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-repairs-unsuccessful"]

    def data_objects_healthy_post(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-healthy-post-repair"]
    def data_objects_unhealthy_post(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-objects-unhealthy-post-repair"]
    def data_corrupt_shares_post(self, ctx, data):
        return self.monitor.get_status().get_counters()["count-corrupt-shares-post-repair"]

    def render_pre_repair_problems_p(self, ctx, data):
        c = self.monitor.get_status().get_counters()
        if c["count-objects-unhealthy-pre-repair"]:
            return ctx.tag
        return ""

    def data_pre_repair_problems(self, ctx, data):
        all_objects = self.monitor.get_status().get_all_results()
        for path in sorted(all_objects.keys()):
            r = all_objects[path]
            assert ICheckAndRepairResults.providedBy(r)
            cr = r.get_pre_repair_results()
            if not cr.is_healthy():
                yield path, cr

    def render_problem(self, ctx, data):
        path, cr = data
        return ["/".join(self._html(path)), ": ", self._html(cr.get_summary())]

    def render_post_repair_problems_p(self, ctx, data):
        c = self.monitor.get_status().get_counters()
        if (c["count-objects-unhealthy-post-repair"]
            or c["count-corrupt-shares-post-repair"]):
            return ctx.tag
        return ""

    def data_post_repair_problems(self, ctx, data):
        all_objects = self.monitor.get_status().get_all_results()
        for path in sorted(all_objects.keys()):
            r = all_objects[path]
            assert ICheckAndRepairResults.providedBy(r)
            cr = r.get_post_repair_results()
            if not cr.is_healthy():
                yield path, cr

    def render_servers_with_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares-pre-repair"]:
            return ctx.tag
        return ""
    def data_servers_with_corrupt_shares(self, ctx, data):
        return [] # TODO
    def render_server_problem(self, ctx, data):
        pass


    def render_remaining_corrupt_shares_p(self, ctx, data):
        if self.monitor.get_status().get_counters()["count-corrupt-shares-post-repair"]:
            return ctx.tag
        return ""
    def data_post_repair_corrupt_shares(self, ctx, data):
        return [] # TODO

    def render_share_problem(self, ctx, data):
        pass


    def render_return(self, ctx, data):
        req = inevow.IRequest(ctx)
        return_to = get_arg(req, "return_to", None)
        if return_to:
            return T.div[T.a(href=return_to)["Return to parent directory"]]
        return ""

    def data_all_objects(self, ctx, data):
        r = self.monitor.get_status().get_all_results()
        for path in sorted(r.keys()):
            yield (path, r[path])

    def render_object(self, ctx, data):
        path, r = data
        ctx.fillSlots("path", "/".join(self._html(path)))
        ctx.fillSlots("healthy_pre_repair",
                      str(r.get_pre_repair_results().is_healthy()))
        ctx.fillSlots("healthy_post_repair",
                      str(r.get_post_repair_results().is_healthy()))
        ctx.fillSlots("summary",
                      self._html(r.get_pre_repair_results().get_summary()))
        return ctx.tag

    def render_runtime(self, ctx, data):
        req = inevow.IRequest(ctx)
        runtime = time.time() - req.processing_started_timestamp
        return ctx.tag["runtime: %s seconds" % runtime]