webish: download-results: add per-server response times

This commit is contained in:
Brian Warner 2008-03-03 20:53:45 -07:00
parent 886ef22335
commit 18eb00d136
4 changed files with 45 additions and 9 deletions

View File

@@ -261,12 +261,19 @@ class BlockDownloader:
def start(self, segnum):
lognum = self.log("get_block(segnum=%d)" % segnum)
started = time.time()
d = self.vbucket.get_block(segnum)
d.addCallbacks(self._hold_block, self._got_block_error,
callbackArgs=(lognum,), errbackArgs=(lognum,))
callbackArgs=(started, lognum,), errbackArgs=(lognum,))
return d
def _hold_block(self, data, lognum):
def _hold_block(self, data, started, lognum):
if self.results:
elapsed = time.time() - started
peerid = self.vbucket.bucket.get_peerid()
if peerid not in self.results.timings["fetch_per_server"]:
self.results.timings["fetch_per_server"][peerid] = []
self.results.timings["fetch_per_server"][peerid].append(elapsed)
self.log("got block", parent=lognum)
self.parent.hold_block(self.blocknum, data)
@@ -331,6 +338,8 @@ class SegmentDownloader:
for blocknum, vbucket in active_buckets.iteritems():
bd = BlockDownloader(vbucket, blocknum, self, self.results)
downloaders.append(bd)
if self.results:
self.results.servers_used.add(vbucket.bucket.get_peerid())
l = [bd.start(self.segmentnumber) for bd in downloaders]
return defer.DeferredList(l, fireOnOneErrback=True)
@@ -428,6 +437,7 @@ class FileDownloader:
s.set_results(self._results)
self._results.file_size = self._size
self._results.timings["servers_peer_selection"] = {}
self._results.timings["fetch_per_server"] = {}
self._results.timings["cumulative_fetch"] = 0.0
self._results.timings["cumulative_decode"] = 0.0
self._results.timings["cumulative_decrypt"] = 0.0

View File

@@ -1308,7 +1308,7 @@ class IDownloadResults(Interface):
public attributes which contain details about the download process.::
.file_size : the size of the file, in bytes
.servers_used : set of server peerids that were used during download
.servers_used : set of server peerids that were used during download
.server_problems : dict mapping server peerid to a problem string. Only
servers that had problems (bad hashes, disconnects) are
listed here.
@@ -1324,7 +1324,7 @@ class IDownloadResults(Interface):
cumulative_decode : just time spent in zfec
cumulative_decrypt : just time spent in decryption
total : total download time, start to finish
servers_fetching : dict of peerid to list of per-segment fetch times
fetch_per_server : dict of peerid to list of per-segment fetch times
"""

View File

@@ -22,9 +22,9 @@
<div n:render="results">
<h2>Download Results</h2>
<ul>
<li>Servers Used: <span n:render="servers_used" /></li>
<li n:render="servers_used" />
<li>Servermap: <span n:render="servermap" /></li>
<li n:render="problems"></li>
<li n:render="problems" />
<li>Timings:</li>
<ul>
<li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
@@ -45,6 +45,7 @@
(<span n:render="rate" n:data="rate_decrypt" />)</li>
</ul>
</ul>
<li n:render="server_timings" />
</ul>
</ul>
</div>

View File

@@ -1674,9 +1674,6 @@ class UploadStatusPage(UploadResultsRendererMixin, rend.Page):
class DownloadResultsRendererMixin:
# this requires a method named 'download_results'
def render_servers_used(self, ctx, data):
return "nope"
def render_servermap(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.servermap)
@@ -1695,6 +1692,18 @@ class DownloadResultsRendererMixin:
d.addCallback(_render)
return d
def render_servers_used(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.servers_used)
def _got(servers_used):
if not servers_used:
return ""
peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
for peerid in servers_used])
return T.li["Servers Used: ", peerids_s]
d.addCallback(_got)
return d
def render_problems(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.server_problems)
@@ -1796,6 +1805,22 @@ class DownloadResultsRendererMixin:
def data_rate_decrypt(self, ctx, data):
return self._get_rate("cumulative_decrypt")
def render_server_timings(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.timings.get("fetch_per_server"))
def _render(per_server):
if per_server is None:
return ""
l = T.ul()
for peerid in sorted(per_server.keys()):
peerid_s = idlib.shortnodeid_b2a(peerid)
times_s = ", ".join([self.render_time(None, t)
for t in per_server[peerid]])
l[T.li["[%s]: %s" % (peerid_s, times_s)]]
return T.li["Per-Server Segment Fetch Response Times: ", l]
d.addCallback(_render)
return d
class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
docFactory = getxmlfile("download-status.xhtml")