"""
Tests for ```allmydata.web.status```.
"""
|
from bs4 import BeautifulSoup

from twisted.web.template import flattenString

from allmydata.web.status import (
    Status,
    StatusElement,
)

from zope.interface import implementer

from allmydata.interfaces import IDownloadResults
from allmydata.web.status import DownloadStatusElement
from allmydata.immutable.downloader.status import DownloadStatus

from .common import (
    assert_soup_has_favicon,
    assert_soup_has_tag_with_content,
)

from ..common import TrialTestCase

from .test_web import FakeHistory


# Test that status.StatusElement can render HTML.
|
|
|
|
class StatusTests(TrialTestCase):
    """
    Tests that ``allmydata.web.status.StatusElement`` renders the
    recent-and-active-operations page as valid HTML.
    """

    def _render_status_page(self, active, recent):
        """
        Flatten a ``StatusElement`` to an HTML document.

        :param active: active operations, as returned by
            ``Status._get_active_operations``.
        :param recent: recent operations, as returned by
            ``Status._get_recent_operations``.
        :return: the rendered HTML document (bytes).
        """
        elem = StatusElement(active, recent)
        d = flattenString(None, elem)
        return self.successResultOf(d)

    def test_status_page(self):
        """
        The status page renders a title, the section headings, and one row
        per operation supplied by the (fake) history.
        """
        status = Status(FakeHistory())
        doc = self._render_status_page(
            status._get_active_operations(),
            status._get_recent_operations()
        )
        soup = BeautifulSoup(doc, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Recent and Active Operations"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Active Operations:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"retrieve"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"publish"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"download"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"td",
            u"upload"
        )

        # Fixed: this literal was missing the u"" prefix used by every
        # sibling assertion; on Python 2 that made it bytes, not text.
        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Recent Operations:"
        )
|
2020-07-18 07:53:36 -04:00
|
|
|
|
2020-07-07 20:49:58 -04:00
|
|
|
|
|
|
|
@implementer(IDownloadResults)
class FakeDownloadResults(object):
    """
    A stub ``IDownloadResults`` that simply stores the canned values it
    is constructed with.
    """

    def __init__(self,
                 file_size=0,
                 servers_used=None,
                 server_problems=None,
                 servermap=None,
                 timings=None):
        """
        See IDownloadResults for parameters.
        """
        # Record every constructor argument verbatim as an attribute of
        # the same name.
        for attr, value in (("file_size", file_size),
                            ("servers_used", servers_used),
                            ("server_problems", server_problems),
                            ("servermap", servermap),
                            ("timings", timings)):
            setattr(self, attr, value)
|
2020-07-07 20:49:58 -04:00
|
|
|
|
|
|
|
|
2020-07-17 16:13:07 -04:00
|
|
|
class FakeDownloadStatus(DownloadStatus):
    """
    A ``DownloadStatus`` whose results are canned values supplied at
    construction time.
    """

    def __init__(self,
                 storage_index=None,
                 file_size=0,
                 servers_used=None,
                 server_problems=None,
                 servermap=None,
                 timings=None):
        """
        See IDownloadStatus and IDownloadResults for parameters.
        """
        # The real base class tracks storage index and size for us.
        super(FakeDownloadStatus, self).__init__(storage_index, file_size)
        self.servers_used = servers_used
        self.server_problems = server_problems
        self.servermap = servermap
        self.timings = timings

    def get_results(self):
        """
        Return a ``FakeDownloadResults`` echoing our canned values.
        """
        return FakeDownloadResults(
            file_size=self.size,
            servers_used=self.servers_used,
            server_problems=self.server_problems,
            servermap=self.servermap,
            timings=self.timings,
        )
|
2020-07-07 20:49:58 -04:00
|
|
|
|
2020-07-17 16:13:07 -04:00
|
|
|
|
2020-07-07 21:56:35 -04:00
|
|
|
class DownloadStatusElementTests(TrialTestCase):
    """
    Tests for ```allmydata.web.status.DownloadStatusElement```.
    """

    def _render_download_status_element(self, status):
        """
        :param IDownloadStatus status:
        :return: HTML string rendered by DownloadStatusElement
        """
        rendered = flattenString(None, DownloadStatusElement(status))
        return self.successResultOf(rendered)

    def test_download_status_element(self):
        """
        See if we can render the page almost fully.
        """
        status = FakeDownloadStatus(
            b"si-1", 123,
            [b"s-1", b"s-2", b"s-3"],
            {b"s-1": "unknown problem"},
            {b"s-1": [1], b"s-2": [1, 2], b"s-3": [2, 3]},
            {"fetch_per_server":
             {b"s-1": [1], b"s-2": [2, 3], b"s-3": [3, 2]}}
        )

        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title", u"Tahoe-LAFS - File Download Status"
        )

        # Every <li> the fully-populated page is expected to contain,
        # in the order the original assertions checked them.
        expected_list_items = [
            u"File Size: 123 bytes",
            u"Progress: 0.0%",
            u"Servers Used: [omwtc], [omwte], [omwtg]",
            u"Server Problems:",
            u"[omwtc]: unknown problem",
            u"Servermap:",
            u"[omwtc] has share: #1",
            u"[omwte] has shares: #1,#2",
            u"[omwtg] has shares: #2,#3",
            u"Per-Server Segment Fetch Response Times:",
            u"[omwtc]: 1.00s",
            u"[omwte]: 2.00s, 3.00s",
            u"[omwtg]: 3.00s, 2.00s",
        ]
        for expected in expected_list_items:
            assert_soup_has_tag_with_content(self, soup, u"li", expected)

    def test_download_status_element_partial(self):
        """
        See if we can render the page with incomplete download status.
        """
        status = FakeDownloadStatus()
        result = self._render_download_status_element(status)
        soup = BeautifulSoup(result, 'html5lib')

        # With no canned data the page still renders, showing the
        # zero/None placeholders.
        for expected in (u"Servermap: None",
                         u"File Size: 0 bytes",
                         u"Total: None (None)"):
            assert_soup_has_tag_with_content(self, soup, u"li", expected)
|