assert about connected storage servers

This commit is contained in:
meejah 2020-01-31 07:10:15 -07:00
parent f9956f4c43
commit e4d556b49e
2 changed files with 18 additions and 2 deletions

View File

@ -45,6 +45,15 @@ def assert_soup_has_tag_with_attributes_and_content(testcase, soup, tag_name, co
assert_soup_has_tag_with_content(testcase, soup, tag_name, content)
def _normalized_contents(tag):
"""
:returns: all the text contents of the tag with whitespace
normalized: all newlines removed and at most one space between
words.
"""
return u" ".join(tag.text.split())
def assert_soup_has_tag_with_content(testcase, soup, tag_name, content):
"""
Using a ``TestCase`` object ``testcase``, assert that the passed
@ -55,10 +64,14 @@ def assert_soup_has_tag_with_content(testcase, soup, tag_name, content):
for tag in tags:
if content in tag.contents:
return
# make this a "fuzzy" option?
# make these "fuzzy" options?
for c in tag.contents:
if content in c:
return
if content in _normalized_contents(tag):
return
# seems like exceptions can't support unicode text in python2??
testcase.fail(
u"No <{}> tag contains the text '{}'".format(tag_name, content).encode('utf8')

View File

@ -871,7 +871,10 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
u"other_nickname \u263B",
{u"class": u"nickname"},
)
# self.failUnlessIn(u'Connected to <span>1</span>\n of <span>2</span> known storage servers', res_u)
assert_soup_has_tag_with_content(
self, soup, u"h2",
u"Connected to 1 of 2 known storage servers"
)
divs = soup.find_all(u"div")
found_status = False
for div in divs: