diff --git a/.travis.yml b/.travis.yml
index dec435604..40f351ae6 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -12,12 +12,7 @@ env:
- TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
install:
-# ~/.local/bin is on $PATH by default, but on OS-X, --user puts it elsewhere
- - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then export PATH=$HOME/Library/Python/2.7/bin:$PATH; fi
- - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then wget https://bootstrap.pypa.io/get-pip.py && sudo python ./get-pip.py; fi
- - pip list
- - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools virtualenv; fi
- - if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools virtualenv; fi
+ - pip install --upgrade tox setuptools virtualenv
- echo $PATH; which python; which pip; which tox
- python misc/build_helpers/show-tool-versions.py
@@ -25,13 +20,6 @@ script:
- |
set -eo pipefail
tox -e ${T}
- # To verify that the resultant PyInstaller-generated binary executes
- # cleanly (i.e., that it terminates with an exit code of 0 and isn't
- # failing due to import/packaging-related errors, etc.).
- if [ "${T}" = "pyinstaller" ]; then dist/Tahoe-LAFS/tahoe --version; fi
-
-after_success:
- - if [ "${T}" = "coverage" ]; then codecov; fi
notifications:
email: false
@@ -45,26 +33,6 @@ notifications:
matrix:
include:
- - os: linux
- python: '2.7'
- env: T=coverage LANG=en_US.UTF-8
- - os: linux
- python: '2.7'
- env: T=codechecks LANG=en_US.UTF-8
- - os: linux
- python: '2.7'
- env: T=pyinstaller LANG=en_US.UTF-8
- - os: linux
- python: '2.7'
- env: T=py27 LANG=C
- - os: osx
- python: '2.7'
- env: T=py27 LANG=en_US.UTF-8
- language: generic # "python" is not available on OS-X
- - os: osx
- python: '2.7'
- env: T=pyinstaller LANG=en_US.UTF-8
- language: generic # "python" is not available on OS-X
- os: linux
python: '3.6'
env: T=py36
diff --git a/integration/conftest.py b/integration/conftest.py
index 5395d7c5f..9c4c7433c 100644
--- a/integration/conftest.py
+++ b/integration/conftest.py
@@ -377,13 +377,31 @@ def chutney(reactor, temp_dir):
proto,
'git',
(
- 'git', 'clone', '--depth=1',
+ 'git', 'clone',
'https://git.torproject.org/chutney.git',
chutney_dir,
),
env=environ,
)
pytest_twisted.blockon(proto.done)
+
+ # XXX: Here we reset Chutney to the last revision known to work
+ # with Python 2, as a workaround for Chutney moving to Python 3.
+ # When this is no longer necessary, we will have to drop this and
+ # add '--depth=1' back to the above 'git clone' subprocess.
+ proto = _DumpOutputProtocol(None)
+ reactor.spawnProcess(
+ proto,
+ 'git',
+ (
+ 'git', '-C', chutney_dir,
+ 'reset', '--hard',
+ '99bd06c7554b9113af8c0877b6eca4ceb95dcbaa'
+ ),
+ env=environ,
+ )
+ pytest_twisted.blockon(proto.done)
+
return chutney_dir
diff --git a/integration/test_web.py b/integration/test_web.py
index 4ba0a6fd1..575e4fc1a 100644
--- a/integration/test_web.py
+++ b/integration/test_web.py
@@ -219,23 +219,21 @@ def test_status(alice):
found_upload = False
found_download = False
for href in hrefs:
- if href.startswith(u"/") or not href:
+ if href == u"/" or not href:
continue
- resp = requests.get(
- util.node_url(alice.node_dir, u"status/{}".format(href)),
- )
- if href.startswith(u'up'):
+ resp = requests.get(util.node_url(alice.node_dir, href))
+ if href.startswith(u"/status/up"):
assert "File Upload Status" in resp.content
if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
found_upload = True
- elif href.startswith(u'down'):
+ elif href.startswith(u"/status/down"):
assert "File Download Status" in resp.content
if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
found_download = True
# download the specialized event information
resp = requests.get(
- util.node_url(alice.node_dir, u"status/{}/event_json".format(href)),
+ util.node_url(alice.node_dir, u"{}/event_json".format(href)),
)
js = json.loads(resp.content)
# there's usually just one "read" operation, but this can handle many ..
diff --git a/newsfragments/3254.minor b/newsfragments/3254.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3288.minor b/newsfragments/3288.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3313.minor b/newsfragments/3313.minor
new file mode 100644
index 000000000..c4eecd956
--- /dev/null
+++ b/newsfragments/3313.minor
@@ -0,0 +1 @@
+Replace nevow with twisted.web in web.operations.OphandleTable
diff --git a/newsfragments/3330.minor b/newsfragments/3330.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3331.minor b/newsfragments/3331.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3332.minor b/newsfragments/3332.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3333.minor b/newsfragments/3333.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3334.minor b/newsfragments/3334.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3335.minor b/newsfragments/3335.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3348.bugfix b/newsfragments/3348.bugfix
new file mode 100644
index 000000000..e0f1f6c5e
--- /dev/null
+++ b/newsfragments/3348.bugfix
@@ -0,0 +1 @@
+Use the last revision of Chutney known to work with Python 2 for Tor integration tests.
diff --git a/newsfragments/3349.bugfix b/newsfragments/3349.bugfix
new file mode 100644
index 000000000..08f2d7314
--- /dev/null
+++ b/newsfragments/3349.bugfix
@@ -0,0 +1 @@
+Mutable files now use RSA exponent 65537
diff --git a/src/allmydata/crypto/rsa.py b/src/allmydata/crypto/rsa.py
index 74f564ded..e82bf12d1 100644
--- a/src/allmydata/crypto/rsa.py
+++ b/src/allmydata/crypto/rsa.py
@@ -46,18 +46,8 @@ def create_signing_keypair(key_size):
:returns: 2-tuple of (private_key, public_key)
"""
- # Tahoe's original use of pycryptopp would use cryptopp's default
- # public_exponent, which is 17
- #
- # Thus, we are using 17 here as well. However, there are other
- # choices; see this for more discussion:
- # https://security.stackexchange.com/questions/2335/should-rsa-public-exponent-be-only-in-3-5-17-257-or-65537-due-to-security-c
- #
- # Another popular choice is 65537. See:
- # https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key
- # https://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
priv_key = rsa.generate_private_key(
- public_exponent=17,
+ public_exponent=65537,
key_size=key_size,
backend=default_backend()
)
diff --git a/src/allmydata/test/web/test_status.py b/src/allmydata/test/web/test_status.py
new file mode 100644
index 000000000..5685a3938
--- /dev/null
+++ b/src/allmydata/test/web/test_status.py
@@ -0,0 +1,230 @@
+"""
+Tests for ``allmydata.web.status``.
+"""
+
+from bs4 import BeautifulSoup
+from twisted.web.template import flattenString
+
+from allmydata.web.status import (
+ Status,
+ StatusElement,
+)
+
+from zope.interface import implementer
+
+from allmydata.interfaces import IDownloadResults
+from allmydata.web.status import DownloadStatusElement
+from allmydata.immutable.downloader.status import DownloadStatus
+
+from .common import (
+ assert_soup_has_favicon,
+ assert_soup_has_tag_with_content,
+)
+from ..common import TrialTestCase
+
+from .test_web import FakeHistory
+
+# Test that status.StatusElement can render HTML.
+class StatusTests(TrialTestCase):
+
+ def _render_status_page(self, active, recent):
+ elem = StatusElement(active, recent)
+ d = flattenString(None, elem)
+ return self.successResultOf(d)
+
+ def test_status_page(self):
+ status = Status(FakeHistory())
+ doc = self._render_status_page(
+ status._get_active_operations(),
+ status._get_recent_operations()
+ )
+ soup = BeautifulSoup(doc, 'html5lib')
+
+ assert_soup_has_favicon(self, soup)
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"title",
+ u"Tahoe-LAFS - Recent and Active Operations"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"h2",
+ u"Active Operations:"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"td",
+ u"retrieve"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"td",
+ u"publish"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"td",
+ u"download"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"td",
+ u"upload"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"h2",
+ "Recent Operations:"
+ )
+
+
+@implementer(IDownloadResults)
+class FakeDownloadResults(object):
+
+ def __init__(self,
+ file_size=0,
+ servers_used=None,
+ server_problems=None,
+ servermap=None,
+ timings=None):
+ """
+ See IDownloadResults for parameters.
+ """
+ self.file_size = file_size
+ self.servers_used = servers_used
+ self.server_problems = server_problems
+ self.servermap = servermap
+ self.timings = timings
+
+
+class FakeDownloadStatus(DownloadStatus):
+
+ def __init__(self,
+ storage_index = None,
+ file_size = 0,
+ servers_used = None,
+ server_problems = None,
+ servermap = None,
+ timings = None):
+ """
+ See IDownloadStatus and IDownloadResults for parameters.
+ """
+ super(FakeDownloadStatus, self).__init__(storage_index, file_size)
+
+ self.servers_used = servers_used
+ self.server_problems = server_problems
+ self.servermap = servermap
+ self.timings = timings
+
+ def get_results(self):
+ return FakeDownloadResults(self.size,
+ self.servers_used,
+ self.server_problems,
+ self.servermap,
+ self.timings)
+
+
+class DownloadStatusElementTests(TrialTestCase):
+ """
+    Tests for ``allmydata.web.status.DownloadStatusElement``.
+ """
+
+ def _render_download_status_element(self, status):
+ """
+ :param IDownloadStatus status:
+ :return: HTML string rendered by DownloadStatusElement
+ """
+ elem = DownloadStatusElement(status)
+ d = flattenString(None, elem)
+ return self.successResultOf(d)
+
+ def test_download_status_element(self):
+ """
+ See if we can render the page almost fully.
+ """
+ status = FakeDownloadStatus(
+ "si-1", 123,
+ ["s-1", "s-2", "s-3"],
+ {"s-1": "unknown problem"},
+ {"s-1": [1], "s-2": [1,2], "s-3": [2,3]},
+ {"fetch_per_server":
+ {"s-1": [1], "s-2": [2,3], "s-3": [3,2]}}
+ )
+
+ result = self._render_download_status_element(status)
+ soup = BeautifulSoup(result, 'html5lib')
+
+ assert_soup_has_favicon(self, soup)
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"title", u"Tahoe-LAFS - File Download Status"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"File Size: 123 bytes"
+ )
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"Progress: 0.0%"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"Servers Used: [omwtc], [omwte], [omwtg]"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"Server Problems:"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwtc]: unknown problem"
+ )
+
+ assert_soup_has_tag_with_content(self, soup, u"li", u"Servermap:")
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwtc] has share: #1"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwte] has shares: #1,#2"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwtg] has shares: #2,#3"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"Per-Server Segment Fetch Response Times:"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwtc]: 1.00s"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwte]: 2.00s, 3.00s"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"[omwtg]: 3.00s, 2.00s"
+ )
+
+ def test_download_status_element_partial(self):
+ """
+ See if we can render the page with incomplete download status.
+ """
+ status = FakeDownloadStatus()
+ result = self._render_download_status_element(status)
+ soup = BeautifulSoup(result, 'html5lib')
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"Servermap: None"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"File Size: 0 bytes"
+ )
+
+ assert_soup_has_tag_with_content(
+ self, soup, u"li", u"Total: None (None)"
+ )
diff --git a/src/allmydata/test/web/test_util.py b/src/allmydata/test/web/test_util.py
index a378c1447..8c48c20a5 100644
--- a/src/allmydata/test/web/test_util.py
+++ b/src/allmydata/test/web/test_util.py
@@ -22,6 +22,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(common.abbreviate_time(0.00123), "1.2ms")
self.failUnlessReallyEqual(common.abbreviate_time(0.000123), "123us")
self.failUnlessReallyEqual(common.abbreviate_time(-123000), "-123000000000us")
+ self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
+ self.failUnlessReallyEqual(common.abbreviate_time(0.25), "250ms")
+ self.failUnlessReallyEqual(common.abbreviate_time(0.0021), "2.1ms")
self.failUnlessReallyEqual(common.abbreviate_time(None), "")
self.failUnlessReallyEqual(common.abbreviate_time(2.5), "2.50s")
@@ -54,6 +57,9 @@ class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(common.abbreviate_rate(1234000), "1.23MBps")
self.failUnlessReallyEqual(common.abbreviate_rate(12340), "12.3kBps")
self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
+ self.failUnlessReallyEqual(common.abbreviate_rate(2500000), "2.50MBps")
+ self.failUnlessReallyEqual(common.abbreviate_rate(30100), "30.1kBps")
+ self.failUnlessReallyEqual(common.abbreviate_rate(123), "123Bps")
def test_abbreviate_size(self):
self.failUnlessReallyEqual(common.abbreviate_size(None), "")
diff --git a/src/allmydata/test/web/test_web.py b/src/allmydata/test/web/test_web.py
index b3c8c7f46..041fa4e0a 100644
--- a/src/allmydata/test/web/test_web.py
+++ b/src/allmydata/test/web/test_web.py
@@ -33,7 +33,6 @@ from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
-from allmydata.web import status
from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data
@@ -972,11 +971,11 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
d = self.GET("/status", followRedirect=True)
def _check(res):
self.failUnlessIn('Recent and Active Operations', res)
- self.failUnlessIn('"down-%d"' % dl_num, res)
- self.failUnlessIn('"up-%d"' % ul_num, res)
- self.failUnlessIn('"mapupdate-%d"' % mu_num, res)
- self.failUnlessIn('"publish-%d"' % pub_num, res)
- self.failUnlessIn('"retrieve-%d"' % ret_num, res)
+ self.failUnlessIn('"/status/down-%d"' % dl_num, res)
+ self.failUnlessIn('"/status/up-%d"' % ul_num, res)
+ self.failUnlessIn('"/status/mapupdate-%d"' % mu_num, res)
+ self.failUnlessIn('"/status/publish-%d"' % pub_num, res)
+ self.failUnlessIn('"/status/retrieve-%d"' % ret_num, res)
d.addCallback(_check)
d.addCallback(lambda res: self.GET("/status/?t=json"))
def _check_json(res):
@@ -1035,17 +1034,209 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
return d
- def test_status_numbers(self):
- drrm = status.DownloadResultsRendererMixin()
- self.failUnlessReallyEqual(drrm.render_time(None, None), "")
- self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
- self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
- self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
- self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
- self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
- self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
- self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
- self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
+ def test_status_path_nodash_error(self):
+ """
+ Expect an error, because path is expected to be of the form
+ "/status/{up,down,..}-%number", with a hyphen.
+ """
+ return self.shouldFail2(error.Error,
+ "test_status_path_nodash",
+ "400 Bad Request",
+ "no '-' in 'nodash'",
+ self.GET,
+ "/status/nodash")
+
+ def test_status_page_contains_links(self):
+ """
+ Check that the rendered `/status` page contains all the
+ expected links.
+ """
+ def _check_status_page_links(response):
+ (body, status, _) = response
+
+ self.failUnlessReallyEqual(int(status), 200)
+
+ soup = BeautifulSoup(body, 'html5lib')
+ h = self.s.get_history()
+
+ # Check for `Not started`
+ ret_num = h.list_all_retrieve_statuses()[0].get_counter()
+ assert_soup_has_tag_with_attributes_and_content(
+ self, soup, u"a",
+ u"Not started",
+ {u"href": u"/status/retrieve-{}".format(ret_num)}
+ )
+
+ # Check for `Not started`
+ pub_num = h.list_all_publish_statuses()[0].get_counter()
+ assert_soup_has_tag_with_attributes_and_content(
+ self, soup, u"a",
+ u"Not started",
+ {u"href": u"/status/publish-{}".format(pub_num)}
+ )
+
+ # Check for `Not started`
+ mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
+ assert_soup_has_tag_with_attributes_and_content(
+ self, soup, u"a",
+ u"Not started",
+ {u"href": u"/status/mapupdate-{}".format(mu_num)}
+ )
+
+ # Check for `fetching segments
+ # 2,3; errors on segment 1`: see build_one_ds() above.
+ dl_num = h.list_all_download_statuses()[0].get_counter()
+ assert_soup_has_tag_with_attributes_and_content(
+ self, soup, u"a",
+ u"fetching segments 2,3; errors on segment 1",
+ {u"href": u"/status/down-{}".format(dl_num)}
+ )
+
+ # Check for `Not started`
+ ul_num = h.list_all_upload_statuses()[0].get_counter()
+ assert_soup_has_tag_with_attributes_and_content(
+ self, soup, u"a",
+ u"Not started",
+ {u"href": u"/status/up-{}".format(ul_num)}
+ )
+
+ d = self.GET("/status", return_response=True)
+ d.addCallback(_check_status_page_links)
+ return d
+
+ def test_status_path_trailing_slashes(self):
+ """
+ Test that both `GET /status` and `GET /status/` are treated
+ alike, but reject any additional trailing slashes and other
+ non-existent child nodes.
+ """
+ def _check_status(response):
+ (body, status, _) = response
+
+ self.failUnlessReallyEqual(int(status), 200)
+
+ soup = BeautifulSoup(body, 'html5lib')
+ assert_soup_has_favicon(self, soup)
+ assert_soup_has_tag_with_content(
+ self, soup, u"title",
+ u"Tahoe-LAFS - Recent and Active Operations"
+ )
+
+ d = self.GET("/status", return_response=True)
+ d.addCallback(_check_status)
+
+ d = self.GET("/status/", return_response=True)
+ d.addCallback(_check_status)
+
+ d = self.shouldFail2(error.Error,
+ "test_status_path_trailing_slashes",
+ "400 Bad Request",
+ "no '-' in ''",
+ self.GET,
+ "/status//")
+
+ d = self.shouldFail2(error.Error,
+ "test_status_path_trailing_slashes",
+ "400 Bad Request",
+ "no '-' in ''",
+ self.GET,
+ "/status////////")
+
+ return d
+
+ def test_status_path_404_error(self):
+ """
+ Looking for non-existent statuses under child paths should
+        exercise all the iterators in web.status.Status.getChild().
+
+ The test suite (hopefully!) would not have done any setup for
+ a very large number of statuses at this point, now or in the
+ future, so these all should always return 404.
+ """
+ d = self.GET("/status/up-9999999")
+ d.addBoth(self.should404, "test_status_path_404_error (up)")
+
+ d = self.GET("/status/down-9999999")
+ d.addBoth(self.should404, "test_status_path_404_error (down)")
+
+ d = self.GET("/status/mapupdate-9999999")
+ d.addBoth(self.should404, "test_status_path_404_error (mapupdate)")
+
+ d = self.GET("/status/publish-9999999")
+ d.addBoth(self.should404, "test_status_path_404_error (publish)")
+
+ d = self.GET("/status/retrieve-9999999")
+ d.addBoth(self.should404, "test_status_path_404_error (retrieve)")
+
+ return d
+
+ def _check_status_subpath_result(self, result, expected_title):
+ """
+ Helper to verify that results of "GET /status/up-0" and
+ similar are as expected.
+ """
+ body, status, _ = result
+ self.failUnlessReallyEqual(int(status), 200)
+ soup = BeautifulSoup(body, 'html5lib')
+ assert_soup_has_favicon(self, soup)
+ assert_soup_has_tag_with_content(
+ self, soup, u"title", expected_title
+ )
+
+ def test_status_up_subpath(self):
+ """
+ See that "GET /status/up-0" works.
+ """
+ h = self.s.get_history()
+ ul_num = h.list_all_upload_statuses()[0].get_counter()
+ d = self.GET("/status/up-{}".format(ul_num), return_response=True)
+ d.addCallback(self._check_status_subpath_result,
+ u"Tahoe-LAFS - File Upload Status")
+ return d
+
+ def test_status_down_subpath(self):
+ """
+ See that "GET /status/down-0" works.
+ """
+ h = self.s.get_history()
+ dl_num = h.list_all_download_statuses()[0].get_counter()
+ d = self.GET("/status/down-{}".format(dl_num), return_response=True)
+ d.addCallback(self._check_status_subpath_result,
+ u"Tahoe-LAFS - File Download Status")
+ return d
+
+ def test_status_mapupdate_subpath(self):
+ """
+ See that "GET /status/mapupdate-0" works.
+ """
+ h = self.s.get_history()
+ mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
+ d = self.GET("/status/mapupdate-{}".format(mu_num), return_response=True)
+ d.addCallback(self._check_status_subpath_result,
+ u"Tahoe-LAFS - Mutable File Servermap Update Status")
+ return d
+
+ def test_status_publish_subpath(self):
+ """
+ See that "GET /status/publish-0" works.
+ """
+ h = self.s.get_history()
+ pub_num = h.list_all_publish_statuses()[0].get_counter()
+ d = self.GET("/status/publish-{}".format(pub_num), return_response=True)
+ d.addCallback(self._check_status_subpath_result,
+ u"Tahoe-LAFS - Mutable File Publish Status")
+ return d
+
+ def test_status_retrieve_subpath(self):
+ """
+ See that "GET /status/retrieve-0" works.
+ """
+ h = self.s.get_history()
+ ret_num = h.list_all_retrieve_statuses()[0].get_counter()
+ d = self.GET("/status/retrieve-{}".format(ret_num), return_response=True)
+ d.addCallback(self._check_status_subpath_result,
+ u"Tahoe-LAFS - Mutable File Retrieve Status")
+ return d
def test_GET_FILEURL(self):
d = self.GET(self.public_url + "/foo/bar.txt")
diff --git a/src/allmydata/web/download-status.xhtml b/src/allmydata/web/download-status.xhtml
index e5794ca79..0c2da523c 100644
--- a/src/allmydata/web/download-status.xhtml
+++ b/src/allmydata/web/download-status.xhtml
@@ -1,58 +1,62 @@
-
+
+
Tahoe-LAFS - File Download Status
+
-File Download Status
+ File Download Status
-
- - Started:
- - Storage Index:
- - Helper?:
- - Total Size:
- - Progress:
- - Status:
-
-
-
-
-
-
Download Results
-
-
- - Servermap:
-
- - Timings:
- - File Size: bytes
- - Total:
- ()
-
- - Peer Selection:
- - UEB Fetch:
- - Hashtree Fetch:
- - Segment Fetch:
- ()
-
- - Cumulative Fetching:
- ()
- - Cumulative Decoding:
- ()
- - Cumulative Decrypting:
- ()
-
- - Paused by client:
-
-
+ - Started:
+ - Storage Index:
+ - Helper?:
+ - Total Size:
+ - Progress:
+ - Status:
-
-
-
+
+
+
+
+
Download Results
+
+
+
+ - Servermap:
+
+ - Timings:
+
+ - File Size: bytes
+ - Total:
+ ()
+
+ - Peer Selection:
+ - UEB Fetch:
+ - Hashtree Fetch:
+ - Segment Fetch:
+ ()
+
+ - Cumulative Fetching:
+ ()
+ - Cumulative Decoding:
+ ()
+ - Cumulative Decrypting:
+ ()
+
+ - Paused by client:
+
+
+
+
+
+
+
diff --git a/src/allmydata/web/operations.py b/src/allmydata/web/operations.py
index eb9321344..4dcad0028 100644
--- a/src/allmydata/web/operations.py
+++ b/src/allmydata/web/operations.py
@@ -1,19 +1,23 @@
import time
-from nevow import rend, url
-from nevow.inevow import IRequest
+from nevow import url
from twisted.web.template import (
renderer,
tags as T,
)
from twisted.python.failure import Failure
from twisted.internet import reactor, defer
+from twisted.web import resource
from twisted.web.http import NOT_FOUND
from twisted.web.html import escape
from twisted.application import service
-from allmydata.web.common import WebError, \
- get_root, get_arg, boolean_of_arg
+from allmydata.web.common import (
+ WebError,
+ get_root,
+ get_arg,
+ boolean_of_arg,
+)
MINUTE = 60
HOUR = 60*MINUTE
@@ -21,13 +25,16 @@ DAY = 24*HOUR
(MONITOR, RENDERER, WHEN_ADDED) = range(3)
-class OphandleTable(rend.Page, service.Service):
+class OphandleTable(resource.Resource, service.Service):
+ """Renders /operations/%d."""
+
name = "operations"
UNCOLLECTED_HANDLE_LIFETIME = 4*DAY
COLLECTED_HANDLE_LIFETIME = 1*DAY
def __init__(self, clock=None):
+ super(OphandleTable, self).__init__()
# both of these are indexed by ophandle
self.handles = {} # tuple of (monitor, renderer, when_added)
self.timers = {}
@@ -45,12 +52,17 @@ class OphandleTable(rend.Page, service.Service):
del self.timers
return service.Service.stopService(self)
- def add_monitor(self, ctx, monitor, renderer):
- ophandle = get_arg(ctx, "ophandle")
+ def add_monitor(self, req, monitor, renderer):
+ """
+ :param allmydata.webish.MyRequest req:
+ :param allmydata.monitor.Monitor monitor:
+ :param allmydata.web.directory.ManifestResults renderer:
+ """
+ ophandle = get_arg(req, "ophandle")
assert ophandle
now = time.time()
self.handles[ophandle] = (monitor, renderer, now)
- retain_for = get_arg(ctx, "retain-for", None)
+ retain_for = get_arg(req, "retain-for", None)
if retain_for is not None:
self._set_timer(ophandle, int(retain_for))
monitor.when_done().addBoth(self._operation_complete, ophandle)
@@ -67,36 +79,42 @@ class OphandleTable(rend.Page, service.Service):
# if we already have a timer, the client must have provided the
# retain-for= value, so don't touch it.
- def redirect_to(self, ctx):
- ophandle = get_arg(ctx, "ophandle")
+ def redirect_to(self, req):
+ """
+ :param allmydata.webish.MyRequest req:
+ """
+ ophandle = get_arg(req, "ophandle")
assert ophandle
- target = get_root(ctx) + "/operations/" + ophandle
- output = get_arg(ctx, "output")
+ target = get_root(req) + "/operations/" + ophandle
+ output = get_arg(req, "output")
if output:
target = target + "?output=%s" % output
+
+ # XXX: We have to use nevow.url here because nevow.appserver
+ # is unhappy with anything else; so this gets its own ticket.
+ # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3314
return url.URL.fromString(target)
- def childFactory(self, ctx, name):
+ def getChild(self, name, req):
ophandle = name
if ophandle not in self.handles:
raise WebError("unknown/expired handle '%s'" % escape(ophandle),
NOT_FOUND)
(monitor, renderer, when_added) = self.handles[ophandle]
- request = IRequest(ctx)
- t = get_arg(ctx, "t", "status")
- if t == "cancel" and request.method == "POST":
+ t = get_arg(req, "t", "status")
+ if t == "cancel" and req.method == "POST":
monitor.cancel()
# return the status anyways, but release the handle
self._release_ophandle(ophandle)
else:
- retain_for = get_arg(ctx, "retain-for", None)
+ retain_for = get_arg(req, "retain-for", None)
if retain_for is not None:
self._set_timer(ophandle, int(retain_for))
if monitor.is_finished():
- if boolean_of_arg(get_arg(ctx, "release-after-complete", "false")):
+ if boolean_of_arg(get_arg(req, "release-after-complete", "false")):
self._release_ophandle(ophandle)
if retain_for is None:
# this GET is collecting the ophandle, so change its timer
@@ -123,6 +141,7 @@ class OphandleTable(rend.Page, service.Service):
self.timers.pop(ophandle, None)
self.handles.pop(ophandle, None)
+
class ReloadMixin(object):
REFRESH_TIME = 1*MINUTE
diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py
index ee0e924e0..3a89e8f5e 100644
--- a/src/allmydata/web/status.py
+++ b/src/allmydata/web/status.py
@@ -1,5 +1,7 @@
-import pprint, itertools, hashlib
+import pprint
+import itertools
+import hashlib
import json
from twisted.internet import defer
from twisted.python.filepath import FilePath
@@ -11,29 +13,26 @@ from twisted.web.template import (
renderElement,
tags,
)
-from nevow import rend, tags as T
from allmydata.util import base32, idlib
from allmydata.web.common import (
- getxmlfile,
abbreviate_time,
abbreviate_rate,
abbreviate_size,
plural,
compute_rate,
render_time,
- MultiFormatPage,
MultiFormatResource,
+ SlotsSequenceElement,
+ WebError,
)
-from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
- IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus
-class RateAndTimeMixin(object):
-
- def render_time(self, ctx, data):
- return abbreviate_time(data)
-
- def render_rate(self, ctx, data):
- return abbreviate_rate(data)
+from allmydata.interfaces import (
+ IUploadStatus,
+ IDownloadStatus,
+ IPublishStatus,
+ IRetrieveStatus,
+ IServermapUpdaterStatus,
+)
class UploadResultsRendererMixin(Element):
@@ -266,130 +265,6 @@ class UploadStatusElement(UploadResultsRendererMixin):
return tag(self._upload_status.get_status())
-class DownloadResultsRendererMixin(RateAndTimeMixin):
- # this requires a method named 'download_results'
-
- def render_servermap(self, ctx, data):
- d = self.download_results()
- d.addCallback(lambda res: res.servermap)
- def _render(servermap):
- if servermap is None:
- return "None"
- l = T.ul()
- for peerid in sorted(servermap.keys()):
- peerid_s = idlib.shortnodeid_b2a(peerid)
- shares_s = ",".join(["#%d" % shnum
- for shnum in servermap[peerid]])
- l[T.li["[%s] has share%s: %s" % (peerid_s,
- plural(servermap[peerid]),
- shares_s)]]
- return l
- d.addCallback(_render)
- return d
-
- def render_servers_used(self, ctx, data):
- d = self.download_results()
- d.addCallback(lambda res: res.servers_used)
- def _got(servers_used):
- if not servers_used:
- return ""
- peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
- for peerid in servers_used])
- return T.li["Servers Used: ", peerids_s]
- d.addCallback(_got)
- return d
-
- def render_problems(self, ctx, data):
- d = self.download_results()
- d.addCallback(lambda res: res.server_problems)
- def _got(server_problems):
- if not server_problems:
- return ""
- l = T.ul()
- for peerid in sorted(server_problems.keys()):
- peerid_s = idlib.shortnodeid_b2a(peerid)
- l[T.li["[%s]: %s" % (peerid_s, server_problems[peerid])]]
- return T.li["Server Problems:", l]
- d.addCallback(_got)
- return d
-
- def data_file_size(self, ctx, data):
- d = self.download_results()
- d.addCallback(lambda res: res.file_size)
- return d
-
- def _get_time(self, name):
- d = self.download_results()
- d.addCallback(lambda res: res.timings.get(name))
- return d
-
- def data_time_total(self, ctx, data):
- return self._get_time("total")
-
- def data_time_peer_selection(self, ctx, data):
- return self._get_time("peer_selection")
-
- def data_time_uri_extension(self, ctx, data):
- return self._get_time("uri_extension")
-
- def data_time_hashtrees(self, ctx, data):
- return self._get_time("hashtrees")
-
- def data_time_segments(self, ctx, data):
- return self._get_time("segments")
-
- def data_time_cumulative_fetch(self, ctx, data):
- return self._get_time("cumulative_fetch")
-
- def data_time_cumulative_decode(self, ctx, data):
- return self._get_time("cumulative_decode")
-
- def data_time_cumulative_decrypt(self, ctx, data):
- return self._get_time("cumulative_decrypt")
-
- def data_time_paused(self, ctx, data):
- return self._get_time("paused")
-
- def _get_rate(self, name):
- d = self.download_results()
- def _convert(r):
- file_size = r.file_size
- duration = r.timings.get(name)
- return compute_rate(file_size, duration)
- d.addCallback(_convert)
- return d
-
- def data_rate_total(self, ctx, data):
- return self._get_rate("total")
-
- def data_rate_segments(self, ctx, data):
- return self._get_rate("segments")
-
- def data_rate_fetch(self, ctx, data):
- return self._get_rate("cumulative_fetch")
-
- def data_rate_decode(self, ctx, data):
- return self._get_rate("cumulative_decode")
-
- def data_rate_decrypt(self, ctx, data):
- return self._get_rate("cumulative_decrypt")
-
- def render_server_timings(self, ctx, data):
- d = self.download_results()
- d.addCallback(lambda res: res.timings.get("fetch_per_server"))
- def _render(per_server):
- if per_server is None:
- return ""
- l = T.ul()
- for peerid in sorted(per_server.keys()):
- peerid_s = idlib.shortnodeid_b2a(peerid)
- times_s = ", ".join([abbreviate_time(t)
- for t in per_server[peerid]])
- l[T.li["[%s]: %s" % (peerid_s, times_s)]]
- return T.li["Per-Server Segment Fetch Response Times: ", l]
- d.addCallback(_render)
- return d
-
def _find_overlap(events, start_key, end_key):
"""
given a list of event dicts, return a new list in which each event
@@ -538,50 +413,85 @@ class _EventJson(Resource, object):
return json.dumps(data, indent=1) + "\n"
-class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
- docFactory = getxmlfile("download-status.xhtml")
+class DownloadStatusPage(Resource, object):
+ """Renders /status/down-%d."""
- def __init__(self, data):
- rend.Page.__init__(self, data)
- self.download_status = data
- self.putChild("event_json", _EventJson(self.download_status))
+ def __init__(self, download_status):
+ """
+ :param IDownloadStatus download_status: stats provider
+ """
+ super(DownloadStatusPage, self).__init__()
+ self._download_status = download_status
+ self.putChild("event_json", _EventJson(self._download_status))
+ def render_GET(self, req):
+ elem = DownloadStatusElement(self._download_status)
+ return renderElement(req, elem)
+
+
+class DownloadStatusElement(Element):
+
+ loader = XMLFile(FilePath(__file__).sibling("download-status.xhtml"))
+
+ def __init__(self, download_status):
+ super(DownloadStatusElement, self).__init__()
+ self._download_status = download_status
+
+ # XXX: fun fact: the `get_results()` method which we wind up
+ # invoking here (see immutable.downloader.status.DownloadStatus)
+ # is unimplemented, and simply returns `None`. As a result,
+ # `results()` renderer returns an empty tag, and does not invoke
+ # any of the subsequent renderers. Thus we end up not displaying
+ # download results on the download status page.
+ #
+ # See #3310: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3310
def download_results(self):
- return defer.maybeDeferred(self.download_status.get_results)
+ return self._download_status.get_results()
- def relative_time(self, t):
+ def _relative_time(self, t):
if t is None:
return t
- if self.download_status.first_timestamp is not None:
- return t - self.download_status.first_timestamp
+ if self._download_status.first_timestamp is not None:
+ return t - self._download_status.first_timestamp
return t
- def short_relative_time(self, t):
- t = self.relative_time(t)
+
+ def _short_relative_time(self, t):
+ t = self._relative_time(t)
if t is None:
return ""
return "+%.6fs" % t
- def render_timeline_link(self, ctx, data):
- from nevow import url
- return T.a(href=url.URL.fromContext(ctx).child("timeline"))["timeline"]
-
def _rate_and_time(self, bytes, seconds):
- time_s = self.render_time(None, seconds)
+ time_s = abbreviate_time(seconds)
if seconds != 0:
- rate = self.render_rate(None, 1.0 * bytes / seconds)
- return T.span(title=rate)[time_s]
- return T.span[time_s]
+ rate = abbreviate_rate(1.0 * bytes / seconds)
+ return tags.span(time_s, title=rate)
+ return tags.span(time_s)
- def render_events(self, ctx, data):
- if not self.download_status.storage_index:
- return
- srt = self.short_relative_time
- l = T.div()
+ # XXX: This method is a candidate for refactoring. It renders
+ # four tables inline. The layout of those tables could be
+ # moved into download-status.xhtml.
+ #
+ # See #3311: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3311
+ @renderer
+ def events(self, req, tag):
+ if not self._download_status.get_storage_index():
+ return tag
- t = T.table(align="left", class_="status-download-events")
- t[T.tr[T.th["serverid"], T.th["sent"], T.th["received"],
- T.th["shnums"], T.th["RTT"]]]
- for d_ev in self.download_status.dyhb_requests:
+ srt = self._short_relative_time
+
+ evtag = tags.div()
+
+ # "DYHB Requests" table.
+ dyhbtag = tags.table(align="left", class_="status-download-events")
+
+ dyhbtag(tags.tr(tags.th("serverid"),
+ tags.th("sent"),
+ tags.th("received"),
+ tags.th("shnums"),
+ tags.th("RTT")))
+
+ for d_ev in self._download_status.dyhb_requests:
server = d_ev["server"]
sent = d_ev["start_time"]
shnums = d_ev["response_shnums"]
@@ -591,20 +501,32 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
rtt = received - sent
if not shnums:
shnums = ["-"]
- t[T.tr(style="background: %s" % _color(server))[
- [T.td[server.get_name()], T.td[srt(sent)], T.td[srt(received)],
- T.td[",".join([str(shnum) for shnum in shnums])],
- T.td[self.render_time(None, rtt)],
- ]]]
- l[T.h2["DYHB Requests:"], t]
- l[T.br(clear="all")]
+ dyhbtag(tags.tr(style="background: %s" % _color(server))(
+ (tags.td(server.get_name()),
+ tags.td(srt(sent)),
+ tags.td(srt(received)),
+ tags.td(",".join([str(shnum) for shnum in shnums])),
+ tags.td(abbreviate_time(rtt)),
+ )))
- t = T.table(align="left",class_="status-download-events")
- t[T.tr[T.th["range"], T.th["start"], T.th["finish"], T.th["got"],
- T.th["time"], T.th["decrypttime"], T.th["pausedtime"],
- T.th["speed"]]]
- for r_ev in self.download_status.read_events:
+ evtag(tags.h2("DYHB Requests:"), dyhbtag)
+ evtag(tags.br(clear="all"))
+
+ # "Read Events" table.
+ readtag = tags.table(align="left",class_="status-download-events")
+
+ readtag(tags.tr((
+ tags.th("range"),
+ tags.th("start"),
+ tags.th("finish"),
+ tags.th("got"),
+ tags.th("time"),
+ tags.th("decrypttime"),
+ tags.th("pausedtime"),
+ tags.th("speed"))))
+
+ for r_ev in self._download_status.read_events:
start = r_ev["start"]
length = r_ev["length"]
bytes = r_ev["bytes_returned"]
@@ -614,25 +536,38 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
speed, rtt = "",""
if r_ev["finish_time"] is not None:
rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"]
- speed = self.render_rate(None, compute_rate(bytes, rtt))
- rtt = self.render_time(None, rtt)
- paused = self.render_time(None, r_ev["paused_time"])
+ speed = abbreviate_rate(compute_rate(bytes, rtt))
+ rtt = abbreviate_time(rtt)
+ paused = abbreviate_time(r_ev["paused_time"])
- t[T.tr[T.td["[%d:+%d]" % (start, length)],
- T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])],
- T.td[bytes], T.td[rtt],
- T.td[decrypt_time], T.td[paused],
- T.td[speed],
- ]]
+ readtag(tags.tr(
+ tags.td("[%d:+%d]" % (start, length)),
+ tags.td(srt(r_ev["start_time"])),
+ tags.td(srt(r_ev["finish_time"])),
+ tags.td(str(bytes)),
+ tags.td(rtt),
+ tags.td(decrypt_time),
+ tags.td(paused),
+ tags.td(speed),
+ ))
- l[T.h2["Read Events:"], t]
- l[T.br(clear="all")]
+ evtag(tags.h2("Read Events:"), readtag)
+ evtag(tags.br(clear="all"))
- t = T.table(align="left",class_="status-download-events")
- t[T.tr[T.th["segnum"], T.th["start"], T.th["active"], T.th["finish"],
- T.th["range"],
- T.th["decodetime"], T.th["segtime"], T.th["speed"]]]
- for s_ev in self.download_status.segment_events:
+ # "Segment Events" table.
+ segtag = tags.table(align="left",class_="status-download-events")
+
+ segtag(tags.tr(
+ tags.th("segnum"),
+ tags.th("start"),
+ tags.th("active"),
+ tags.th("finish"),
+ tags.th("range"),
+ tags.th("decodetime"),
+ tags.th("segtime"),
+ tags.th("speed")))
+
+ for s_ev in self._download_status.segment_events:
range_s = "-"
segtime_s = "-"
speed = "-"
@@ -640,10 +575,10 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
if s_ev["finish_time"] is not None:
if s_ev["success"]:
segtime = s_ev["finish_time"] - s_ev["active_time"]
- segtime_s = self.render_time(None, segtime)
+ segtime_s = abbreviate_time(segtime)
seglen = s_ev["segment_length"]
range_s = "[%d:+%d]" % (s_ev["segment_start"], seglen)
- speed = self.render_rate(None, compute_rate(seglen, segtime))
+ speed = abbreviate_rate(compute_rate(seglen, segtime))
decode_time = self._rate_and_time(seglen, s_ev["decode_time"])
else:
# error
@@ -652,76 +587,213 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
# not finished yet
pass
- t[T.tr[T.td["seg%d" % s_ev["segment_number"]],
- T.td[srt(s_ev["start_time"])],
- T.td[srt(s_ev["active_time"])],
- T.td[srt(s_ev["finish_time"])],
- T.td[range_s],
- T.td[decode_time],
- T.td[segtime_s], T.td[speed]]]
+ segtag(tags.tr(
+ tags.td("seg%d" % s_ev["segment_number"]),
+ tags.td(srt(s_ev["start_time"])),
+ tags.td(srt(s_ev["active_time"])),
+ tags.td(srt(s_ev["finish_time"])),
+ tags.td(range_s),
+ tags.td(decode_time),
+ tags.td(segtime_s),
+ tags.td(speed)))
- l[T.h2["Segment Events:"], t]
- l[T.br(clear="all")]
- t = T.table(align="left",class_="status-download-events")
- t[T.tr[T.th["serverid"], T.th["shnum"], T.th["range"],
- T.th["txtime"], T.th["rxtime"],
- T.th["received"], T.th["RTT"]]]
- for r_ev in self.download_status.block_requests:
+ evtag(tags.h2("Segment Events:"), segtag)
+ evtag(tags.br(clear="all"))
+
+ # "Requests" table.
+ reqtab = tags.table(align="left",class_="status-download-events")
+
+ reqtab(tags.tr(
+ tags.th("serverid"),
+ tags.th("shnum"),
+ tags.th("range"),
+ tags.th("txtime"),
+ tags.th("rxtime"),
+ tags.th("received"),
+ tags.th("RTT")))
+
+ for r_ev in self._download_status.block_requests:
server = r_ev["server"]
rtt = None
if r_ev["finish_time"] is not None:
rtt = r_ev["finish_time"] - r_ev["start_time"]
color = _color(server)
- t[T.tr(style="background: %s" % color)[
- T.td[server.get_name()], T.td[r_ev["shnum"]],
- T.td["[%d:+%d]" % (r_ev["start"], r_ev["length"])],
- T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])],
- T.td[r_ev["response_length"] or ""],
- T.td[self.render_time(None, rtt)],
- ]]
+ reqtab(tags.tr(style="background: %s" % color)
+ (
+ tags.td(server.get_name()),
+ tags.td(str(r_ev["shnum"])),
+ tags.td("[%d:+%d]" % (r_ev["start"], r_ev["length"])),
+ tags.td(srt(r_ev["start_time"])),
+ tags.td(srt(r_ev["finish_time"])),
+ tags.td(str(r_ev["response_length"]) or ""),
+ tags.td(abbreviate_time(rtt)),
+ ))
- l[T.h2["Requests:"], t]
- l[T.br(clear="all")]
+ evtag(tags.h2("Requests:"), reqtab)
+ evtag(tags.br(clear="all"))
- return l
+ return evtag
- def render_results(self, ctx, data):
- d = self.download_results()
- def _got_results(results):
- if results:
- return ctx.tag
- return ""
- d.addCallback(_got_results)
- return d
+ @renderer
+ def results(self, req, tag):
+ if self.download_results():
+ return tag
+ return ""
- def render_started(self, ctx, data):
- started_s = render_time(data.get_started())
- return started_s + " (%s)" % data.get_started()
+ @renderer
+ def started(self, req, tag):
+ started_s = render_time(self._download_status.get_started())
+ return tag(started_s + " (%s)" % self._download_status.get_started())
- def render_si(self, ctx, data):
- si_s = base32.b2a_or_none(data.get_storage_index())
+ @renderer
+ def si(self, req, tag):
+ si_s = base32.b2a_or_none(self._download_status.get_storage_index())
if si_s is None:
si_s = "(None)"
- return si_s
+ return tag(si_s)
- def render_helper(self, ctx, data):
- return {True: "Yes",
- False: "No"}[data.using_helper()]
+ @renderer
+ def helper(self, req, tag):
+ return tag({True: "Yes",
+ False: "No"}[self._download_status.using_helper()])
- def render_total_size(self, ctx, data):
- size = data.get_size()
+ @renderer
+ def total_size(self, req, tag):
+ size = self._download_status.get_size()
if size is None:
return "(unknown)"
- return size
+ return tag(str(size))
- def render_progress(self, ctx, data):
- progress = data.get_progress()
+ @renderer
+ def progress(self, req, tag):
+ progress = self._download_status.get_progress()
# TODO: make an ascii-art bar
- return "%.1f%%" % (100.0 * progress)
+ return tag("%.1f%%" % (100.0 * progress))
- def render_status(self, ctx, data):
- return data.get_status()
+ @renderer
+ def status(self, req, tag):
+ return tag(self._download_status.get_status())
+ @renderer
+ def servers_used(self, req, tag):
+ servers_used = self.download_results().servers_used
+ if not servers_used:
+ return ""
+ peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
+ for peerid in servers_used])
+ return tags.li("Servers Used: ", peerids_s)
+
+ @renderer
+ def servermap(self, req, tag):
+ servermap = self.download_results().servermap
+ if not servermap:
+ return tag("None")
+ ul = tags.ul()
+ for peerid in sorted(servermap.keys()):
+ peerid_s = idlib.shortnodeid_b2a(peerid)
+ shares_s = ",".join(["#%d" % shnum
+ for shnum in servermap[peerid]])
+ ul(tags.li("[%s] has share%s: %s" % (peerid_s,
+ plural(servermap[peerid]),
+ shares_s)))
+ return ul
+
+ @renderer
+ def problems(self, req, tag):
+ server_problems = self.download_results().server_problems
+ if not server_problems:
+ return ""
+ ul = tags.ul()
+ for peerid in sorted(server_problems.keys()):
+ peerid_s = idlib.shortnodeid_b2a(peerid)
+ ul(tags.li("[%s]: %s" % (peerid_s, server_problems[peerid])))
+ return tags.li("Server Problems:", ul)
+
+ @renderer
+ def file_size(self, req, tag):
+ return tag(str(self.download_results().file_size))
+
+ def _get_time(self, name):
+ if self.download_results().timings:
+ return self.download_results().timings.get(name)
+ return None
+
+ @renderer
+ def time_total(self, req, tag):
+ return tag(str(self._get_time("total")))
+
+ @renderer
+ def time_peer_selection(self, req, tag):
+ return tag(str(self._get_time("peer_selection")))
+
+ @renderer
+ def time_uri_extension(self, req, tag):
+ return tag(str(self._get_time("uri_extension")))
+
+ @renderer
+ def time_hashtrees(self, req, tag):
+ return tag(str(self._get_time("hashtrees")))
+
+ @renderer
+ def time_segments(self, req, tag):
+ return tag(str(self._get_time("segments")))
+
+ @renderer
+ def time_cumulative_fetch(self, req, tag):
+ return tag(str(self._get_time("cumulative_fetch")))
+
+ @renderer
+ def time_cumulative_decode(self, req, tag):
+ return tag(str(self._get_time("cumulative_decode")))
+
+ @renderer
+ def time_cumulative_decrypt(self, req, tag):
+ return tag(str(self._get_time("cumulative_decrypt")))
+
+ @renderer
+ def time_paused(self, req, tag):
+ return tag(str(self._get_time("paused")))
+
+ def _get_rate(self, name):
+ r = self.download_results()
+ file_size = r.file_size
+ duration = None
+ if r.timings:
+ duration = r.timings.get(name)
+ return compute_rate(file_size, duration)
+
+ @renderer
+ def rate_total(self, req, tag):
+ return tag(str(self._get_rate("total")))
+
+ @renderer
+ def rate_segments(self, req, tag):
+ return tag(str(self._get_rate("segments")))
+
+ @renderer
+ def rate_fetch(self, req, tag):
+ return tag(str(self._get_rate("cumulative_fetch")))
+
+ @renderer
+ def rate_decode(self, req, tag):
+ return tag(str(self._get_rate("cumulative_decode")))
+
+ @renderer
+ def rate_decrypt(self, req, tag):
+ return tag(str(self._get_rate("cumulative_decrypt")))
+
+ @renderer
+ def server_timings(self, req, tag):
+ per_server = self._get_time("fetch_per_server")
+ if per_server is None:
+ return ""
+ ul = tags.ul()
+ for peerid in sorted(per_server.keys()):
+ peerid_s = idlib.shortnodeid_b2a(peerid)
+ times_s = ", ".join([abbreviate_time(t)
+ for t in per_server[peerid]])
+ ul(tags.li("[%s]: %s" % (peerid_s, times_s)))
+ return tags.li("Per-Server Segment Fetch Response Times: ", ul)
class RetrieveStatusPage(MultiFormatResource):
@@ -1166,14 +1238,21 @@ def marshal_json(s):
return item
-class Status(MultiFormatPage):
- docFactory = getxmlfile("status.xhtml")
- addSlash = True
+class Status(MultiFormatResource):
+ """Renders /status page."""
def __init__(self, history):
- rend.Page.__init__(self, history)
+ """
+ :param allmydata.history.History history: provides operation statuses.
+ """
+ super(Status, self).__init__()
self.history = history
+ def render_HTML(self, req):
+ elem = StatusElement(self._get_active_operations(),
+ self._get_recent_operations())
+ return renderElement(req, elem)
+
def render_JSON(self, req):
# modern browsers now render this instead of forcing downloads
req.setHeader("content-type", "application/json")
@@ -1189,97 +1268,23 @@ class Status(MultiFormatPage):
return json.dumps(data, indent=1) + "\n"
- def _get_all_statuses(self):
- h = self.history
- return itertools.chain(h.list_all_upload_statuses(),
- h.list_all_download_statuses(),
- h.list_all_mapupdate_statuses(),
- h.list_all_publish_statuses(),
- h.list_all_retrieve_statuses(),
- h.list_all_helper_statuses(),
- )
+ def getChild(self, path, request):
+ # The "if (path is empty) return self" line should handle
+ # trailing slash in request path.
+ #
+ # Twisted Web's documentation says this: "If the URL ends in a
+ # slash, for example ``http://example.com/foo/bar/`` , the
+ # final URL segment will be an empty string. Resources can
+ # thus know if they were requested with or without a final
+ # slash."
+ if not path and request.postpath != ['']:
+ return self
- def data_active_operations(self, ctx, data):
- return self._get_active_operations()
-
- def _get_active_operations(self):
- active = [s
- for s in self._get_all_statuses()
- if s.get_active()]
- active.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
- active.reverse()
- return active
-
- def data_recent_operations(self, ctx, data):
- return self._get_recent_operations()
-
- def _get_recent_operations(self):
- recent = [s
- for s in self._get_all_statuses()
- if not s.get_active()]
- recent.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
- recent.reverse()
- return recent
-
- def render_row(self, ctx, data):
- s = data
-
- started_s = render_time(s.get_started())
- ctx.fillSlots("started", started_s)
-
- si_s = base32.b2a_or_none(s.get_storage_index())
- if si_s is None:
- si_s = "(None)"
- ctx.fillSlots("si", si_s)
- ctx.fillSlots("helper", {True: "Yes",
- False: "No"}[s.using_helper()])
-
- size = s.get_size()
- if size is None:
- size = "(unknown)"
- elif isinstance(size, (int, long, float)):
- size = abbreviate_size(size)
- ctx.fillSlots("total_size", size)
-
- progress = data.get_progress()
- if IUploadStatus.providedBy(data):
- link = "up-%d" % data.get_counter()
- ctx.fillSlots("type", "upload")
- # TODO: make an ascii-art bar
- (chk, ciphertext, encandpush) = progress
- progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
- ( (100.0 * chk),
- (100.0 * ciphertext),
- (100.0 * encandpush) ))
- ctx.fillSlots("progress", progress_s)
- elif IDownloadStatus.providedBy(data):
- link = "down-%d" % data.get_counter()
- ctx.fillSlots("type", "download")
- ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
- elif IPublishStatus.providedBy(data):
- link = "publish-%d" % data.get_counter()
- ctx.fillSlots("type", "publish")
- ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
- elif IRetrieveStatus.providedBy(data):
- ctx.fillSlots("type", "retrieve")
- link = "retrieve-%d" % data.get_counter()
- ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
- else:
- assert IServermapUpdaterStatus.providedBy(data)
- ctx.fillSlots("type", "mapupdate %s" % data.get_mode())
- link = "mapupdate-%d" % data.get_counter()
- ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
- ctx.fillSlots("status", T.a(href=link)[s.get_status()])
- return ctx.tag
-
- def childFactory(self, ctx, name):
h = self.history
try:
- stype, count_s = name.split("-")
+ stype, count_s = path.split("-")
except ValueError:
- raise RuntimeError(
- "no - in '{}'".format(name)
- )
+ raise WebError("no '-' in '{}'".format(path))
count = int(count_s)
if stype == "up":
for s in itertools.chain(h.list_all_upload_statuses(),
@@ -1305,6 +1310,109 @@ class Status(MultiFormatPage):
if s.get_counter() == count:
return RetrieveStatusPage(s)
+ def _get_all_statuses(self):
+ h = self.history
+ return itertools.chain(h.list_all_upload_statuses(),
+ h.list_all_download_statuses(),
+ h.list_all_mapupdate_statuses(),
+ h.list_all_publish_statuses(),
+ h.list_all_retrieve_statuses(),
+ h.list_all_helper_statuses(),
+ )
+
+ def _get_active_operations(self):
+ active = [s
+ for s in self._get_all_statuses()
+ if s.get_active()]
+ active.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
+ active.reverse()
+ return active
+
+ def _get_recent_operations(self):
+ recent = [s
+ for s in self._get_all_statuses()
+ if not s.get_active()]
+ recent.sort(lambda a, b: cmp(a.get_started(), b.get_started()))
+ recent.reverse()
+ return recent
+
+
+class StatusElement(Element):
+
+ loader = XMLFile(FilePath(__file__).sibling("status.xhtml"))
+
+ def __init__(self, active, recent):
+ super(StatusElement, self).__init__()
+ self._active = active
+ self._recent = recent
+
+ @renderer
+ def active_operations(self, req, tag):
+ active = [self.get_op_state(op) for op in self._active]
+ return SlotsSequenceElement(tag, active)
+
+ @renderer
+ def recent_operations(self, req, tag):
+ recent = [self.get_op_state(op) for op in self._recent]
+ return SlotsSequenceElement(tag, recent)
+
+ @staticmethod
+ def get_op_state(op):
+ result = dict()
+
+ started_s = render_time(op.get_started())
+ result["started"] = started_s
+
+ si_s = base32.b2a_or_none(op.get_storage_index())
+ if si_s is None:
+ si_s = "(None)"
+
+ result["si"] = si_s
+ result["helper"] = {True: "Yes", False: "No"}[op.using_helper()]
+
+ size = op.get_size()
+ if size is None:
+ size = "(unknown)"
+ elif isinstance(size, (int, long, float)):
+ size = abbreviate_size(size)
+
+ result["total_size"] = size
+
+ progress = op.get_progress()
+ if IUploadStatus.providedBy(op):
+ link = "up-%d" % op.get_counter()
+ result["type"] = "upload"
+ # TODO: make an ascii-art bar
+ (chk, ciphertext, encandpush) = progress
+ progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
+ ((100.0 * chk),
+ (100.0 * ciphertext),
+ (100.0 * encandpush)))
+ result["progress"] = progress_s
+ elif IDownloadStatus.providedBy(op):
+ link = "down-%d" % op.get_counter()
+ result["type"] = "download"
+ result["progress"] = "%.1f%%" % (100.0 * progress)
+ elif IPublishStatus.providedBy(op):
+ link = "publish-%d" % op.get_counter()
+ result["type"] = "publish"
+ result["progress"] = "%.1f%%" % (100.0 * progress)
+ elif IRetrieveStatus.providedBy(op):
+ result["type"] = "retrieve"
+ link = "retrieve-%d" % op.get_counter()
+ result["progress"] = "%.1f%%" % (100.0 * progress)
+ else:
+ assert IServermapUpdaterStatus.providedBy(op)
+ result["type"] = "mapupdate %s" % op.get_mode()
+ link = "mapupdate-%d" % op.get_counter()
+ result["progress"] = "%.1f%%" % (100.0 * progress)
+
+ result["status"] = tags.a(op.get_status(),
+ href="/status/{}".format(link))
+
+ return result
+
+
# Render "/helper_status" page.
class HelperStatus(MultiFormatResource):
diff --git a/src/allmydata/web/status.xhtml b/src/allmydata/web/status.xhtml
index d827002f4..045d5aa85 100644
--- a/src/allmydata/web/status.xhtml
+++ b/src/allmydata/web/status.xhtml
@@ -1,4 +1,4 @@
-
+
Tahoe-LAFS - Recent and Active Operations
@@ -11,8 +11,8 @@
Active Operations:
-
-
+
+
Type |
Storage Index |
Helper? |
@@ -20,21 +20,21 @@
Progress |
Status |
-
- |
- |
- |
- |
- |
- |
+
+ |
+ |
+ |
+ |
+ |
+ |
- No active operations! |
+ No active operations! |
Recent Operations:
-
-
+
+
Started |
Type |
Storage Index |
@@ -43,16 +43,16 @@
Progress |
Status |
-
- |
- |
- |
- |
- |
- |
- |
+
+ |
+ |
+ |
+ |
+ |
+ |
+ |
- No recent operations! |
+ No recent operations! |