webish: download-results: add server_problems

Brian Warner 2008-03-03 20:30:35 -07:00
parent def910c391
commit 886ef22335
6 changed files with 36 additions and 8 deletions

View File

@@ -247,10 +247,11 @@ class BlockDownloader:
     I am a child of the SegmentDownloader.
     """
-    def __init__(self, vbucket, blocknum, parent):
+    def __init__(self, vbucket, blocknum, parent, results):
         self.vbucket = vbucket
         self.blocknum = blocknum
         self.parent = parent
+        self.results = results
         self._log_number = self.parent.log("starting block %d" % blocknum)
     def log(self, msg, parent=None):
@@ -272,6 +273,9 @@ class BlockDownloader:
     def _got_block_error(self, f, lognum):
         self.log("BlockDownloader[%d] got error: %s" % (self.blocknum, f),
                  parent=lognum)
+        if self.results:
+            peerid = self.vbucket.bucket.get_peerid()
+            self.results.server_problems[peerid] = str(f)
         self.parent.bucket_failed(self.vbucket)
 class SegmentDownloader:
@@ -281,11 +285,12 @@ class SegmentDownloader:
     I am a child of the FileDownloader.
     """
-    def __init__(self, parent, segmentnumber, needed_shares):
+    def __init__(self, parent, segmentnumber, needed_shares, results):
         self.parent = parent
         self.segmentnumber = segmentnumber
         self.needed_blocks = needed_shares
         self.blocks = {} # k: blocknum, v: data
+        self.results = results
         self._log_number = self.parent.log("starting segment %d" %
                                            segmentnumber)
@@ -324,7 +329,7 @@ class SegmentDownloader:
         # through it.
         downloaders = []
         for blocknum, vbucket in active_buckets.iteritems():
-            bd = BlockDownloader(vbucket, blocknum, self)
+            bd = BlockDownloader(vbucket, blocknum, self, self.results)
             downloaders.append(bd)
         l = [bd.start(self.segmentnumber) for bd in downloaders]
         return defer.DeferredList(l, fireOnOneErrback=True)
@@ -791,7 +796,8 @@ class FileDownloader:
                         100.0 * segnum / self._total_segments))
         # memory footprint: when the SegmentDownloader finishes pulling down
         # all shares, we have 1*segment_size of usage.
-        segmentdler = SegmentDownloader(self, segnum, self._num_needed_shares)
+        segmentdler = SegmentDownloader(self, segnum, self._num_needed_shares,
+                                        self._results)
         started = time.time()
         d = segmentdler.start()
         def _finished_fetching(res):
@@ -845,7 +851,8 @@ class FileDownloader:
         self.log("downloading seg#%d of %d (%d%%)"
                  % (segnum, self._total_segments,
                     100.0 * segnum / self._total_segments))
-        segmentdler = SegmentDownloader(self, segnum, self._num_needed_shares)
+        segmentdler = SegmentDownloader(self, segnum, self._num_needed_shares,
+                                        self._results)
         started = time.time()
         d = segmentdler.start()
         def _finished_fetching(res):

View File

@@ -1309,9 +1309,9 @@ class IDownloadResults(Interface):
     .file_size : the size of the file, in bytes
     .servers_used : set of server peerids that were used during download
-    .server_problems : dict mapping server peerid to a problem string. Only
-                       servers that had problems (bad hashes, disconnects) are
-                       listed here.
+    .server_problems : dict mapping server peerid to a problem string. Only
+                       servers that had problems (bad hashes, disconnects) are
+                       listed here.
     .servermap : dict mapping server peerid to a set of share numbers. Only
                  servers that had any shares are listed here.
     .timings : dict of timing information, mapping name to seconds (float)
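For readers unfamiliar with the results object described by IDownloadResults, here is a minimal, self-contained sketch (not Tahoe code) of how a caller might inspect the fields listed above once a download finishes. The FakeResults class, its sample values, and the hex abbreviation used in place of idlib.shortnodeid_b2a are all illustrative assumptions, not part of this commit.

# Illustrative sketch only: a stand-in object with the attributes that
# IDownloadResults describes. The real results object is produced by the
# downloader; the FakeResults class and its values here are made up.
class FakeResults:
    def __init__(self):
        self.file_size = 123456
        self.servers_used = set([b"\x11" * 20, b"\x22" * 20])
        # only servers that actually misbehaved appear in server_problems
        self.server_problems = {b"\x22" * 20: "bad block hash in segment 3"}
        self.servermap = {b"\x11" * 20: set([0, 1]), b"\x22" * 20: set([2])}
        self.timings = {"total": 0.87}

def summarize(results):
    # peerid.hex()[:8] stands in for idlib.shortnodeid_b2a, which is not imported here
    for peerid, problem in sorted(results.server_problems.items()):
        print("problem with server [%s]: %s" % (peerid.hex()[:8], problem))

summarize(FakeResults())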

View File

@@ -1207,6 +1207,9 @@ class ReadBucketProxy:
         self._si_s = storage_index_s
         self._started = False
+    def get_peerid(self):
+        return self._peerid
     def __repr__(self):
         peerid_s = idlib.shortnodeid_b2a(self._peerid)
         return "<ReadBucketProxy to peer [%s] SI %s>" % (peerid_s,

View File

@@ -31,6 +31,9 @@ class FakeBucketWriterProxy:
         self.share_hashes = None
         self.closed = False
+    def get_peerid(self):
+        return "peerid"
     def startIfNecessary(self):
         return defer.succeed(self)
     def start(self):

View File

@@ -24,6 +24,7 @@
       <ul>
         <li>Servers Used: <span n:render="servers_used" /></li>
         <li>Servermap: <span n:render="servermap" /></li>
+        <li n:render="problems"></li>
         <li>Timings:</li>
         <ul>
           <li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>

View File

@@ -1695,6 +1695,20 @@ class DownloadResultsRendererMixin:
         d.addCallback(_render)
         return d
+    def render_problems(self, ctx, data):
+        d = self.download_results()
+        d.addCallback(lambda res: res.server_problems)
+        def _got(server_problems):
+            if not server_problems:
+                return ""
+            l = T.ul()
+            for peerid in sorted(server_problems.keys()):
+                peerid_s = idlib.shortnodeid_b2a(peerid)
+                l[T.li["[%s]: %s" % (peerid_s, server_problems[peerid])]]
+            return T.li["Server Problems:", l]
+        d.addCallback(_got)
+        return d
     def data_file_size(self, ctx, data):
         d = self.download_results()
         d.addCallback(lambda res: res.file_size)
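The template change and the render_problems method work together through Nevow: the n:render="problems" directive in download-status makes the page call render_problems, which returns either an empty string (when no server misbehaved) or a nested list built from stan tags (T.li / T.ul), one entry per problem server. As a rough, framework-free sketch of that same logic (the function name render_problems_html, the use of html.escape, and the hex peerid abbreviation are assumptions for illustration, not part of the commit):

# Rough stand-alone approximation of what render_problems emits, using plain
# strings instead of Nevow stan tags; peerid.hex()[:8] stands in for
# idlib.shortnodeid_b2a. Not the actual Tahoe code.
import html

def render_problems_html(server_problems):
    if not server_problems:
        return ""                      # nothing is rendered when no server misbehaved
    items = []
    for peerid in sorted(server_problems):
        peerid_s = peerid.hex()[:8]    # abbreviated peerid
        items.append("<li>[%s]: %s</li>"
                     % (peerid_s, html.escape(server_problems[peerid])))
    return "<li>Server Problems:<ul>%s</ul></li>" % "".join(items)

print(render_problems_html({b"\x9f" * 20: "lost connection during block fetch"}))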