Merge pull request #945 from tahoe-lafs/3566.web-tests-python-3-part-2

Port web tests to Python 3, part 2

Fixes ticket:3566
Itamar Turner-Trauring 2021-01-04 15:40:52 -05:00 committed by GitHub
commit 4683760ed8
21 changed files with 239 additions and 189 deletions
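
Nearly every hunk below applies the same Python 3 bytes-versus-text discipline: upload payloads, share data, URL paths handed to the web API, and Twisted request arguments become bytes, while rendered HTML and JSON bodies are decoded to text before substring assertions. A minimal standalone illustration of the distinction (not Tahoe code):

# Python 3 keeps bytes and str strictly separate; Python 2 mixed them freely.
payload = b"data" * 100              # upload payloads and share data stay bytes
assert isinstance(payload, bytes)

text = payload[:4].decode("utf-8")   # decode before doing text assertions
assert "data" in text                # str in str
assert b"data" in payload            # bytes in bytes

try:
    "data" in payload                # mixing the two raises TypeError on Python 3
except TypeError:
    pass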

newsfragments/3566.minor (new normal file, 0 lines)
View File

View File

@ -34,10 +34,10 @@ class Blacklist(object):
try:
if self.last_mtime is None or current_mtime > self.last_mtime:
self.entries.clear()
with open(self.blacklist_fn, "r") as f:
with open(self.blacklist_fn, "rb") as f:
for line in f:
line = line.strip()
if not line or line.startswith("#"):
if not line or line.startswith(b"#"):
continue
si_s, reason = line.split(None, 1)
si = base32.a2b(si_s) # must be valid base32
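
This hunk switches the blacklist parser to binary mode so the comment-prefix test and the storage-index string stay bytes all the way down to base32.a2b, which expects bytes on Python 3. A hypothetical standalone mirror of the loop (read_blacklist is illustrative, not a Tahoe function), assuming a file of "<base32-si> <reason>" lines:

def read_blacklist(path):
    """Parse '<base32 storage index> <reason>' lines into a dict (all bytes)."""
    entries = {}
    with open(path, "rb") as f:                    # bytes in, bytes out
        for line in f:
            line = line.strip()
            if not line or line.startswith(b"#"):  # the prefix must be bytes too
                continue
            si_s, reason = line.split(None, 1)     # bytes.split(None, ...) works
            entries[si_s] = reason                 # Tahoe then base32-decodes si_s
    return entries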

View File

@ -18,7 +18,6 @@ import time
from zope.interface import implementer
from twisted.internet import defer
from foolscap.api import fireEventually
import json
from allmydata.crypto import aes
from allmydata.deep_stats import DeepStats
@ -31,7 +30,7 @@ from allmydata.interfaces import IFilesystemNode, IDirectoryNode, IFileNode, \
from allmydata.check_results import DeepCheckResults, \
DeepCheckAndRepairResults
from allmydata.monitor import Monitor
from allmydata.util import hashutil, base32, log
from allmydata.util import hashutil, base32, log, jsonbytes as json
from allmydata.util.encodingutil import quote_output, normalize
from allmydata.util.assertutil import precondition
from allmydata.util.netstring import netstring, split_netstring
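
Several files in this diff replace the stdlib json import with allmydata.util.jsonbytes, bound to the same json name, so that bytes values left over from the port can still be serialized. The real wrapper lives in the Tahoe tree; a minimal sketch of the idea, with illustrative names:

import json

class _BytesJSONEncoder(json.JSONEncoder):
    """Encode bytes values as UTF-8 text instead of raising TypeError."""
    def default(self, o):
        if isinstance(o, bytes):
            return o.decode("utf-8")
        return json.JSONEncoder.default(self, o)

def dumps(obj, **kwargs):
    """json.dumps, but tolerant of bytes values."""
    kwargs.setdefault("cls", _BytesJSONEncoder)
    return json.dumps(obj, **kwargs)

assert dumps({"cap": b"URI:CHK:..."}) == '{"cap": "URI:CHK:..."}'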

View File

@ -1,5 +1,7 @@
from __future__ import print_function
from future.utils import bchr
# do not import any allmydata modules at this level. Do that from inside
# individual functions instead.
import struct, time, os, sys
@ -905,7 +907,7 @@ def corrupt_share(options):
f = open(fn, "rb+")
f.seek(offset)
d = f.read(1)
d = chr(ord(d) ^ 0x01)
d = bchr(ord(d) ^ 0x01)
f.seek(offset)
f.write(d)
f.close()
@ -920,7 +922,7 @@ def corrupt_share(options):
f.seek(m.DATA_OFFSET)
data = f.read(2000)
# make sure this slot contains an SMDF share
assert data[0] == b"\x00", "non-SDMF mutable shares not supported"
assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported"
f.close()
(version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
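
The data[0:1] and bchr() changes work around a Python 3 behavior change: indexing a bytes object yields an int, so comparing data[0] against b"\x00" is always false, and chr() produces str rather than a one-byte bytes object. In short:

data = b"\x00abc"

assert data[0] == 0                  # indexing bytes yields an int on Python 3
assert data[0] != b"\x00"            # so the old comparison silently went false
assert data[0:1] == b"\x00"          # a one-byte slice compares as intended

flipped = bytes([data[0] ^ 0x01])    # what future.utils.bchr(...) returns on Python 3
assert flipped == b"\x01"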

View File

@ -11,7 +11,7 @@ __all__ = [
"skipIf",
]
from past.builtins import chr as byteschr
from past.builtins import chr as byteschr, unicode
import os, random, struct
import six
@ -825,13 +825,18 @@ class WebErrorMixin(object):
code=None, substring=None, response_substring=None,
callable=None, *args, **kwargs):
# returns a Deferred with the response body
assert substring is None or isinstance(substring, str)
if isinstance(substring, bytes):
substring = unicode(substring, "ascii")
if isinstance(response_substring, unicode):
response_substring = response_substring.encode("ascii")
assert substring is None or isinstance(substring, unicode)
assert response_substring is None or isinstance(response_substring, bytes)
assert callable
def _validate(f):
if code is not None:
self.failUnlessEqual(f.value.status, str(code), which)
self.failUnlessEqual(f.value.status, b"%d" % code, which)
if substring:
code_string = str(f)
code_string = unicode(f)
self.failUnless(substring in code_string,
"%s: substring '%s' not in '%s'"
% (which, substring, code_string))
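
The status assertion changes from str(code) to b"%d" % code because the failure's status attribute is a byte string under Python 3; byte-string %-formatting is available again as of Python 3.5 (PEP 461). For example:

code = 404
status = b"%d" % code            # bytes %-formatting, Python 3.5+
assert status == b"404"
assert status != str(code)       # b"404" and "404" never compare equal on Python 3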

View File

@ -1,7 +1,8 @@
from __future__ import print_function
from future.utils import PY2, native_str
from future.utils import PY2, native_str, bchr, binary_type
from future.builtins import str as future_str
from past.builtins import unicode
import os
import time
@ -20,9 +21,6 @@ from twisted.trial import unittest
from ..util.assertutil import precondition
from ..scripts import runner
from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, get_io_encoding
# Imported for backwards compatibility:
from future.utils import bord, bchr, binary_type
from past.builtins import unicode
def skip_if_cannot_represent_filename(u):
@ -183,13 +181,12 @@ def insecurerandstr(n):
return b''.join(map(bchr, map(randrange, [0]*n, [256]*n)))
def flip_bit(good, which):
# TODO Probs need to update with bchr/bord as with flip_one_bit, below.
# flip the low-order bit of good[which]
"""Flip the low-order bit of good[which]."""
if which == -1:
pieces = good[:which], good[-1:], ""
pieces = good[:which], good[-1:], b""
else:
pieces = good[:which], good[which:which+1], good[which+1:]
return pieces[0] + chr(ord(pieces[1]) ^ 0x01) + pieces[2]
return pieces[0] + bchr(ord(pieces[1]) ^ 0x01) + pieces[2]
def flip_one_bit(s, offset=0, size=None):
""" flip one random bit of the string s, in a byte greater than or equal to offset and less
@ -198,7 +195,7 @@ def flip_one_bit(s, offset=0, size=None):
if size is None:
size=len(s)-offset
i = randrange(offset, offset+size)
result = s[:i] + bchr(bord(s[i])^(0x01<<randrange(0, 8))) + s[i+1:]
result = s[:i] + bchr(ord(s[i:i+1])^(0x01<<randrange(0, 8))) + s[i+1:]
assert result != s, "Internal error -- flip_one_bit() produced the same string as its input: %s == %s" % (result, s)
return result

View File

@ -24,6 +24,7 @@ from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from past.builtins import unicode
from six import ensure_text
import os
from base64 import b32encode
@ -614,8 +615,7 @@ class GridTestMixin(object):
method="GET", clientnum=0, **kwargs):
# if return_response=True, this fires with (data, statuscode,
# respheaders) instead of just data.
assert not isinstance(urlpath, unicode)
url = self.client_baseurls[clientnum] + urlpath
url = self.client_baseurls[clientnum] + ensure_text(urlpath)
response = yield treq.request(method, url, persistent=False,
allow_redirects=followRedirect,

View File

@ -173,7 +173,7 @@ class WebResultsRendering(unittest.TestCase):
return c
def render_json(self, resource):
return self.successResultOf(render(resource, {"output": ["json"]}))
return self.successResultOf(render(resource, {b"output": [b"json"]}))
def render_element(self, element, args=None):
if args is None:
@ -186,7 +186,7 @@ class WebResultsRendering(unittest.TestCase):
html = self.render_element(lcr)
self.failUnlessIn(b"Literal files are always healthy", html)
html = self.render_element(lcr, args={"return_to": ["FOOURL"]})
html = self.render_element(lcr, args={b"return_to": [b"FOOURL"]})
self.failUnlessIn(b"Literal files are always healthy", html)
self.failUnlessIn(b'<a href="FOOURL">Return to file.</a>', html)
@ -269,7 +269,7 @@ class WebResultsRendering(unittest.TestCase):
self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
self.failUnlessIn("Not Recoverable! : rather dead", s)
html = self.render_element(w, args={"return_to": ["FOOURL"]})
html = self.render_element(w, args={b"return_to": [b"FOOURL"]})
self.failUnlessIn(b'<a href="FOOURL">Return to file/directory.</a>',
html)

View File

@ -70,7 +70,7 @@ def renderJSON(resource):
"""
Render a JSON from the given resource.
"""
return render(resource, {"t": ["json"]})
return render(resource, {b"t": [b"json"]})
class MyBucketCountingCrawler(BucketCountingCrawler):
def finished_prefix(self, cycle, prefix):
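
The render() helpers now build their fake request arguments with bytes keys and values because twisted.web parses the query string into a bytes-keyed args dict on Python 3, so str keys no longer match anything. Roughly:

from urllib.parse import parse_qs

# Approximately what ends up in Request.args for "?t=json&output=html":
args = parse_qs(b"t=json&output=html")
assert args == {b"t": [b"json"], b"output": [b"html"]}
assert args.get("t") is None     # a str key silently misses the bytes-keyed dict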

View File

@ -1,6 +1,17 @@
"""
Ported to Python 3.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os.path, re, urllib
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os.path, re
from urllib.parse import quote as url_quote
import json
from six.moves import StringIO
@ -37,7 +48,7 @@ DIR_HTML_TAG = '<html lang="en">'
class CompletelyUnhandledError(Exception):
pass
class ErrorBoom(object, resource.Resource):
class ErrorBoom(resource.Resource, object):
@render_exception
def render(self, req):
raise CompletelyUnhandledError("whoops")
@ -47,32 +58,38 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def CHECK(self, ign, which, args, clientnum=0):
fileurl = self.fileurls[which]
url = fileurl + "?" + args
return self.GET(url, method="POST", clientnum=clientnum)
return self.GET_unicode(url, method="POST", clientnum=clientnum)
def GET_unicode(self, *args, **kwargs):
"""Send an HTTP request, but convert result to Unicode string."""
d = GridTestMixin.GET(self, *args, **kwargs)
d.addCallback(str, "utf-8")
return d
def test_filecheck(self):
self.basedir = "web/Grid/filecheck"
self.set_up_grid()
c0 = self.g.clients[0]
self.uris = {}
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
DATA = b"data" * 100
d = c0.upload(upload.Data(DATA, convergence=b""))
def _stash_uri(ur, which):
self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "good")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"1", convergence="")))
c0.upload(upload.Data(DATA+b"1", convergence=b"")))
d.addCallback(_stash_uri, "sick")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"2", convergence="")))
c0.upload(upload.Data(DATA+b"2", convergence=b"")))
d.addCallback(_stash_uri, "dead")
def _stash_mutable_uri(n, which):
self.uris[which] = n.get_uri()
assert isinstance(self.uris[which], str)
assert isinstance(self.uris[which], bytes)
d.addCallback(lambda ign:
c0.create_mutable_file(publish.MutableData(DATA+"3")))
c0.create_mutable_file(publish.MutableData(DATA+b"3")))
d.addCallback(_stash_mutable_uri, "corrupt")
d.addCallback(lambda ign:
c0.upload(upload.Data("literal", convergence="")))
c0.upload(upload.Data(b"literal", convergence=b"")))
d.addCallback(_stash_uri, "small")
d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
d.addCallback(_stash_mutable_uri, "smalldir")
@ -80,7 +97,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def _compute_fileurls(ignored):
self.fileurls = {}
for which in self.uris:
self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
self.fileurls[which] = "uri/" + url_quote(self.uris[which])
d.addCallback(_compute_fileurls)
def _clobber_shares(ignored):
@ -203,28 +220,28 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.set_up_grid()
c0 = self.g.clients[0]
self.uris = {}
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
DATA = b"data" * 100
d = c0.upload(upload.Data(DATA, convergence=b""))
def _stash_uri(ur, which):
self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "good")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"1", convergence="")))
c0.upload(upload.Data(DATA+b"1", convergence=b"")))
d.addCallback(_stash_uri, "sick")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"2", convergence="")))
c0.upload(upload.Data(DATA+b"2", convergence=b"")))
d.addCallback(_stash_uri, "dead")
def _stash_mutable_uri(n, which):
self.uris[which] = n.get_uri()
assert isinstance(self.uris[which], str)
assert isinstance(self.uris[which], bytes)
d.addCallback(lambda ign:
c0.create_mutable_file(publish.MutableData(DATA+"3")))
c0.create_mutable_file(publish.MutableData(DATA+b"3")))
d.addCallback(_stash_mutable_uri, "corrupt")
def _compute_fileurls(ignored):
self.fileurls = {}
for which in self.uris:
self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
self.fileurls[which] = "uri/" + url_quote(self.uris[which])
d.addCallback(_compute_fileurls)
def _clobber_shares(ignored):
@ -286,8 +303,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.set_up_grid()
c0 = self.g.clients[0]
self.uris = {}
DATA = "data" * 100
d = c0.upload(upload.Data(DATA+"1", convergence=""))
DATA = b"data" * 100
d = c0.upload(upload.Data(DATA+b"1", convergence=b""))
def _stash_uri(ur, which):
self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "sick")
@ -295,7 +312,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def _compute_fileurls(ignored):
self.fileurls = {}
for which in self.uris:
self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
self.fileurls[which] = "uri/" + url_quote(self.uris[which])
d.addCallback(_compute_fileurls)
def _clobber_shares(ignored):
@ -329,7 +346,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.fileurls = {}
# the future cap format may contain slashes, which must be tolerated
expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
expected_info_url = "uri/%s?t=info" % url_quote(unknown_rwcap,
safe="")
if immutable:
@ -343,8 +360,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def _stash_root_and_create_file(n):
self.rootnode = n
self.rooturl = "uri/" + urllib.quote(n.get_uri())
self.rourl = "uri/" + urllib.quote(n.get_readonly_uri())
self.rooturl = "uri/" + url_quote(n.get_uri())
self.rourl = "uri/" + url_quote(n.get_readonly_uri())
if not immutable:
return self.rootnode.set_node(name, future_node)
d.addCallback(_stash_root_and_create_file)
@ -352,18 +369,19 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
# make sure directory listing tolerates unknown nodes
d.addCallback(lambda ign: self.GET(self.rooturl))
def _check_directory_html(res, expected_type_suffix):
pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
'<td>%s</td>' % (expected_type_suffix, str(name)),
pattern = re.compile(br'<td>\?%s</td>[ \t\n\r]*'
b'<td>%s</td>' % (
expected_type_suffix, name.encode("ascii")),
re.DOTALL)
self.failUnless(re.search(pattern, res), res)
# find the More Info link for name, should be relative
mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
mo = re.search(br'<a href="([^"]+)">More Info</a>', res)
info_url = mo.group(1)
self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
self.failUnlessReallyEqual(info_url, b"%s?t=info" % (name.encode("ascii"),))
if immutable:
d.addCallback(_check_directory_html, "-IMM")
d.addCallback(_check_directory_html, b"-IMM")
else:
d.addCallback(_check_directory_html, "")
d.addCallback(_check_directory_html, b"")
d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
def _check_directory_json(res, expect_rw_uri):
@ -383,7 +401,6 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
def _check_info(res, expect_rw_uri, expect_ro_uri):
self.failUnlessIn("Object Type: <span>unknown</span>", res)
if expect_rw_uri:
self.failUnlessIn(unknown_rwcap, res)
if expect_ro_uri:
@ -393,6 +410,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.failUnlessIn(unknown_rocap, res)
else:
self.failIfIn(unknown_rocap, res)
res = str(res, "utf-8")
self.failUnlessIn("Object Type: <span>unknown</span>", res)
self.failIfIn("Raw data as", res)
self.failIfIn("Directory writecap", res)
self.failIfIn("Checker Operations", res)
@ -404,7 +423,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(lambda ign: self.GET(expected_info_url))
d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, str(name))))
d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, name)))
d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
def _check_json(res, expect_rw_uri):
@ -436,9 +455,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
# or not future_node was immutable.
d.addCallback(lambda ign: self.GET(self.rourl))
if immutable:
d.addCallback(_check_directory_html, "-IMM")
d.addCallback(_check_directory_html, b"-IMM")
else:
d.addCallback(_check_directory_html, "-RO")
d.addCallback(_check_directory_html, b"-RO")
d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
d.addCallback(_check_directory_json, expect_rw_uri=False)
@ -462,9 +481,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.uris = {}
self.fileurls = {}
lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
lonely_uri = b"URI:LIT:n5xgk" # LIT for "one"
mut_write_uri = b"URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
mut_read_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
# This method tests mainly dirnode, but we'd have to duplicate code in order to
# test the dirnode and web layers separately.
@ -507,10 +526,10 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
rep = str(dn)
self.failUnlessIn("RO-IMM", rep)
cap = dn.get_cap()
self.failUnlessIn("CHK", cap.to_string())
self.failUnlessIn(b"CHK", cap.to_string())
self.cap = cap
self.rootnode = dn
self.rooturl = "uri/" + urllib.quote(dn.get_uri())
self.rooturl = "uri/" + url_quote(dn.get_uri())
return download_to_data(dn._node)
d.addCallback(_created)
@ -526,7 +545,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
entry = entries[0]
(name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
name = name_utf8.decode("utf-8")
self.failUnlessEqual(rwcapdata, "")
self.failUnlessEqual(rwcapdata, b"")
self.failUnlessIn(name, kids)
(expected_child, ign) = kids[name]
self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
@ -553,13 +572,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(lambda ign: self.GET(self.rooturl))
def _check_html(res):
soup = BeautifulSoup(res, 'html5lib')
self.failIfIn("URI:SSK", res)
self.failIfIn(b"URI:SSK", res)
found = False
for td in soup.find_all(u"td"):
if td.text != u"FILE":
continue
a = td.findNextSibling()(u"a")[0]
self.assertIn(urllib.quote(lonely_uri), a[u"href"])
self.assertIn(url_quote(lonely_uri), a[u"href"])
self.assertEqual(u"lonely", a.text)
self.assertEqual(a[u"rel"], [u"noreferrer"])
self.assertEqual(u"{}".format(len("one")), td.findNextSibling().findNextSibling().text)
@ -573,7 +592,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
if a.text == u"More Info"
)
self.assertEqual(1, len(infos))
self.assertTrue(infos[0].endswith(urllib.quote(lonely_uri) + "?t=info"))
self.assertTrue(infos[0].endswith(url_quote(lonely_uri) + "?t=info"))
d.addCallback(_check_html)
# ... and in JSON.
@ -596,12 +615,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
c0 = self.g.clients[0]
self.uris = {}
self.fileurls = {}
DATA = "data" * 100
DATA = b"data" * 100
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
return n.add_file(u"good", upload.Data(DATA, convergence=""))
self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
return n.add_file(u"good", upload.Data(DATA, convergence=b""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
self.uris[which] = fn.get_uri()
@ -609,13 +628,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(_stash_uri, "good")
d.addCallback(lambda ign:
self.rootnode.add_file(u"small",
upload.Data("literal",
convergence="")))
upload.Data(b"literal",
convergence=b"")))
d.addCallback(_stash_uri, "small")
d.addCallback(lambda ign:
self.rootnode.add_file(u"sick",
upload.Data(DATA+"1",
convergence="")))
upload.Data(DATA+b"1",
convergence=b"")))
d.addCallback(_stash_uri, "sick")
# this tests that deep-check and stream-manifest will ignore
@ -695,13 +714,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(_stash_uri, "subdir")
d.addCallback(lambda subdir_node:
subdir_node.add_file(u"grandchild",
upload.Data(DATA+"2",
convergence="")))
upload.Data(DATA+b"2",
convergence=b"")))
d.addCallback(_stash_uri, "grandchild")
d.addCallback(lambda ign:
self.delete_shares_numbered(self.uris["subdir"],
range(1, 10)))
list(range(1, 10))))
# root
# root/good
@ -770,30 +789,30 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
c0 = self.g.clients[0]
self.uris = {}
self.fileurls = {}
DATA = "data" * 100
DATA = b"data" * 100
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
return n.add_file(u"good", upload.Data(DATA, convergence=""))
self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
return n.add_file(u"good", upload.Data(DATA, convergence=b""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
self.uris[which] = fn.get_uri()
d.addCallback(_stash_uri, "good")
d.addCallback(lambda ign:
self.rootnode.add_file(u"small",
upload.Data("literal",
convergence="")))
upload.Data(b"literal",
convergence=b"")))
d.addCallback(_stash_uri, "small")
d.addCallback(lambda ign:
self.rootnode.add_file(u"sick",
upload.Data(DATA+"1",
convergence="")))
upload.Data(DATA+b"1",
convergence=b"")))
d.addCallback(_stash_uri, "sick")
#d.addCallback(lambda ign:
# self.rootnode.add_file(u"dead",
# upload.Data(DATA+"2",
# convergence="")))
# upload.Data(DATA+b"2",
# convergence=b"")))
#d.addCallback(_stash_uri, "dead")
#d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
@ -888,25 +907,25 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.set_up_grid(num_clients=2, oneshare=True)
c0 = self.g.clients[0]
self.uris = {}
DATA = "data" * 100
d = c0.upload(upload.Data(DATA, convergence=""))
DATA = b"data" * 100
d = c0.upload(upload.Data(DATA, convergence=b""))
def _stash_uri(ur, which):
self.uris[which] = ur.get_uri()
d.addCallback(_stash_uri, "one")
d.addCallback(lambda ign:
c0.upload(upload.Data(DATA+"1", convergence="")))
c0.upload(upload.Data(DATA+b"1", convergence=b"")))
d.addCallback(_stash_uri, "two")
def _stash_mutable_uri(n, which):
self.uris[which] = n.get_uri()
assert isinstance(self.uris[which], str)
assert isinstance(self.uris[which], bytes)
d.addCallback(lambda ign:
c0.create_mutable_file(publish.MutableData(DATA+"2")))
c0.create_mutable_file(publish.MutableData(DATA+b"2")))
d.addCallback(_stash_mutable_uri, "mutable")
def _compute_fileurls(ignored):
self.fileurls = {}
for which in self.uris:
self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
self.fileurls[which] = "uri/" + url_quote(self.uris[which])
d.addCallback(_compute_fileurls)
d.addCallback(self._count_leases, "one")
@ -982,25 +1001,25 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
c0 = self.g.clients[0]
self.uris = {}
self.fileurls = {}
DATA = "data" * 100
DATA = b"data" * 100
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.uris["root"] = n.get_uri()
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
return n.add_file(u"one", upload.Data(DATA, convergence=""))
self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
return n.add_file(u"one", upload.Data(DATA, convergence=b""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
self.uris[which] = fn.get_uri()
d.addCallback(_stash_uri, "one")
d.addCallback(lambda ign:
self.rootnode.add_file(u"small",
upload.Data("literal",
convergence="")))
upload.Data(b"literal",
convergence=b"")))
d.addCallback(_stash_uri, "small")
d.addCallback(lambda ign:
c0.create_mutable_file(publish.MutableData("mutable")))
c0.create_mutable_file(publish.MutableData(b"mutable")))
d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
d.addCallback(_stash_uri, "mutable")
@ -1051,36 +1070,36 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
c0 = self.g.clients[0]
c0.encoding_params['happy'] = 2
self.fileurls = {}
DATA = "data" * 100
DATA = b"data" * 100
d = c0.create_dirnode()
def _stash_root(n):
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
self.fileurls["imaginary"] = self.fileurls["root"] + "/imaginary"
return n
d.addCallback(_stash_root)
d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence=b"")))
def _stash_bad(ur):
self.fileurls["1share"] = "uri/" + urllib.quote(ur.get_uri())
self.delete_shares_numbered(ur.get_uri(), range(1,10))
self.fileurls["1share"] = "uri/" + url_quote(ur.get_uri())
self.delete_shares_numbered(ur.get_uri(), list(range(1,10)))
u = uri.from_string(ur.get_uri())
u.key = testutil.flip_bit(u.key, 0)
baduri = u.to_string()
self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
self.fileurls["0shares"] = "uri/" + url_quote(baduri)
d.addCallback(_stash_bad)
d.addCallback(lambda ign: c0.create_dirnode())
def _mangle_dirnode_1share(n):
u = n.get_uri()
url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u)
url = self.fileurls["dir-1share"] = "uri/" + url_quote(u)
self.fileurls["dir-1share-json"] = url + "?t=json"
self.delete_shares_numbered(u, range(1,10))
self.delete_shares_numbered(u, list(range(1,10)))
d.addCallback(_mangle_dirnode_1share)
d.addCallback(lambda ign: c0.create_dirnode())
def _mangle_dirnode_0share(n):
u = n.get_uri()
url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u)
url = self.fileurls["dir-0share"] = "uri/" + url_quote(u)
self.fileurls["dir-0share-json"] = url + "?t=json"
self.delete_shares_numbered(u, range(0,10))
self.delete_shares_numbered(u, list(range(0,10)))
d.addCallback(_mangle_dirnode_0share)
# NotEnoughSharesError should be reported sensibly, with a
@ -1092,6 +1111,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
410, "Gone", "NoSharesError",
self.GET, self.fileurls["0shares"]))
def _check_zero_shares(body):
body = str(body, "utf-8")
self.failIfIn("<html>", body)
body = " ".join(body.strip().split())
exp = ("NoSharesError: no shares could be found. "
@ -1100,7 +1120,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
"severe corruption. You should perform a filecheck on "
"this object to learn more. The full error message is: "
"no shares (need 3). Last failure: None")
self.failUnlessReallyEqual(exp, body)
self.assertEqual(exp, body)
d.addCallback(_check_zero_shares)
@ -1109,6 +1129,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
410, "Gone", "NotEnoughSharesError",
self.GET, self.fileurls["1share"]))
def _check_one_share(body):
body = str(body, "utf-8")
self.failIfIn("<html>", body)
body = " ".join(body.strip().split())
msgbase = ("NotEnoughSharesError: This indicates that some "
@ -1133,10 +1154,11 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
404, "Not Found", None,
self.GET, self.fileurls["imaginary"]))
def _missing_child(body):
body = str(body, "utf-8")
self.failUnlessIn("No such child: imaginary", body)
d.addCallback(_missing_child)
d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-0share"]))
def _check_0shares_dir_html(body):
self.failUnlessIn(DIR_HTML_TAG, body)
# we should see the regular page, but without the child table or
@ -1155,7 +1177,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.failUnlessIn("No upload forms: directory is unreadable", body)
d.addCallback(_check_0shares_dir_html)
d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-1share"]))
def _check_1shares_dir_html(body):
# at some point, we'll split UnrecoverableFileError into 0-shares
# and some-shares like we did for immutable files (since there
@ -1182,6 +1204,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.GET,
self.fileurls["dir-0share-json"]))
def _check_unrecoverable_file(body):
body = str(body, "utf-8")
self.failIfIn("<html>", body)
body = " ".join(body.strip().split())
exp = ("UnrecoverableFileError: the directory (or mutable file) "
@ -1209,7 +1232,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
# attach a webapi child that throws a random error, to test how it
# gets rendered.
w = c0.getServiceNamed("webish")
w.root.putChild("ERRORBOOM", ErrorBoom())
w.root.putChild(b"ERRORBOOM", ErrorBoom())
# "Accept: */*" : should get a text/html stack trace
# "Accept: text/plain" : should get a text/plain stack trace
@ -1222,6 +1245,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.GET, "ERRORBOOM",
headers={"accept": "*/*"}))
def _internal_error_html1(body):
body = str(body, "utf-8")
self.failUnlessIn("<html>", "expected HTML, not '%s'" % body)
d.addCallback(_internal_error_html1)
@ -1231,6 +1255,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.GET, "ERRORBOOM",
headers={"accept": "text/plain"}))
def _internal_error_text2(body):
body = str(body, "utf-8")
self.failIfIn("<html>", body)
self.failUnless(body.startswith("Traceback "), body)
d.addCallback(_internal_error_text2)
@ -1242,6 +1267,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.GET, "ERRORBOOM",
headers={"accept": CLI_accepts}))
def _internal_error_text3(body):
body = str(body, "utf-8")
self.failIfIn("<html>", body)
self.failUnless(body.startswith("Traceback "), body)
d.addCallback(_internal_error_text3)
@ -1251,7 +1277,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
500, "Internal Server Error", None,
self.GET, "ERRORBOOM"))
def _internal_error_html4(body):
self.failUnlessIn("<html>", body)
self.failUnlessIn(b"<html>", body)
d.addCallback(_internal_error_html4)
def _flush_errors(res):
@ -1269,12 +1295,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
c0 = self.g.clients[0]
fn = c0.config.get_config_path("access.blacklist")
self.uris = {}
DATA = "off-limits " * 50
DATA = b"off-limits " * 50
d = c0.upload(upload.Data(DATA, convergence=""))
d = c0.upload(upload.Data(DATA, convergence=b""))
def _stash_uri_and_create_dir(ur):
self.uri = ur.get_uri()
self.url = "uri/"+self.uri
self.url = b"uri/"+self.uri
u = uri.from_string_filenode(self.uri)
self.si = u.get_storage_index()
childnode = c0.create_node_from_uri(self.uri, None)
@ -1283,9 +1309,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def _stash_dir(node):
self.dir_node = node
self.dir_uri = node.get_uri()
self.dir_url = "uri/"+self.dir_uri
self.dir_url = b"uri/"+self.dir_uri
d.addCallback(_stash_dir)
d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
def _check_dir_html(body):
self.failUnlessIn(DIR_HTML_TAG, body)
self.failUnlessIn("blacklisted.txt</a>", body)
@ -1298,7 +1324,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
f.write(" # this is a comment\n")
f.write(" \n")
f.write("\n") # also exercise blank lines
f.write("%s %s\n" % (base32.b2a(self.si), "off-limits to you"))
f.write("%s off-limits to you\n" % (str(base32.b2a(self.si), "ascii"),))
f.close()
# clients should be checking the blacklist each time, so we don't
# need to restart the client
@ -1309,14 +1335,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.GET, self.url))
# We should still be able to list the parent directory, in HTML...
d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
def _check_dir_html2(body):
self.failUnlessIn(DIR_HTML_TAG, body)
self.failUnlessIn("blacklisted.txt</strike>", body)
d.addCallback(_check_dir_html2)
# ... and in JSON (used by CLI).
d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True))
d.addCallback(lambda ign: self.GET(self.dir_url+b"?t=json", followRedirect=True))
def _check_dir_json(res):
data = json.loads(res)
self.failUnless(isinstance(data, list), data)
@ -1355,14 +1381,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(_add_dir)
def _get_dircap(dn):
self.dir_si_b32 = base32.b2a(dn.get_storage_index())
self.dir_url_base = "uri/"+dn.get_write_uri()
self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json"
self.dir_url_json2 = "uri/"+dn.get_write_uri()+"?t=json"
self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"?t=json"
self.child_url = "uri/"+dn.get_readonly_uri()+"/child"
self.dir_url_base = b"uri/"+dn.get_write_uri()
self.dir_url_json1 = b"uri/"+dn.get_write_uri()+b"?t=json"
self.dir_url_json2 = b"uri/"+dn.get_write_uri()+b"?t=json"
self.dir_url_json_ro = b"uri/"+dn.get_readonly_uri()+b"?t=json"
self.child_url = b"uri/"+dn.get_readonly_uri()+b"/child"
d.addCallback(_get_dircap)
d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, body))
d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, str(body, "utf-8")))
d.addCallback(lambda ign: self.GET(self.dir_url_json1))
d.addCallback(lambda res: json.loads(res)) # just check it decodes
d.addCallback(lambda ign: self.GET(self.dir_url_json2))
@ -1373,8 +1399,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
def _block_dir(ign):
f = open(fn, "w")
f.write("%s %s\n" % (self.dir_si_b32, "dir-off-limits to you"))
f = open(fn, "wb")
f.write(b"%s %s\n" % (self.dir_si_b32, b"dir-off-limits to you"))
f.close()
self.g.clients[0].blacklist.last_mtime -= 2.0
d.addCallback(_block_dir)

View File

@ -746,7 +746,10 @@ class MultiFormatResourceTests(TrialTestCase):
"<title>400 - Bad Format</title>", response_body,
)
self.assertIn(
"Unknown t value: 'foo'", response_body,
"Unknown t value:", response_body,
)
self.assertIn(
"'foo'", response_body,
)

View File

@ -180,6 +180,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_uri",
"allmydata.test.test_util",
"allmydata.test.web.test_common",
"allmydata.test.web.test_grid",
"allmydata.test.web.test_util",
"allmydata.test.web.test_status",
]

View File

@ -1,4 +1,5 @@
from past.builtins import unicode
from six import ensure_text, ensure_str
import time
import json
@ -99,17 +100,19 @@ def get_filenode_metadata(filenode):
def boolean_of_arg(arg):
# TODO: ""
arg = ensure_text(arg)
if arg.lower() not in ("true", "t", "1", "false", "f", "0", "on", "off"):
raise WebError("invalid boolean argument: %r" % (arg,), http.BAD_REQUEST)
return arg.lower() in ("true", "t", "1", "on")
def parse_replace_arg(replace):
replace = ensure_text(replace)
if replace.lower() == "only-files":
return replace
try:
return boolean_of_arg(replace)
except WebError:
raise WebError("invalid replace= argument: %r" % (replace,), http.BAD_REQUEST)
raise WebError("invalid replace= argument: %r" % (ensure_str(replace),), http.BAD_REQUEST)
def get_format(req, default="CHK"):
@ -118,11 +121,11 @@ def get_format(req, default="CHK"):
if boolean_of_arg(get_arg(req, "mutable", "false")):
return "SDMF"
return default
if arg.upper() == "CHK":
if arg.upper() == b"CHK":
return "CHK"
elif arg.upper() == "SDMF":
elif arg.upper() == b"SDMF":
return "SDMF"
elif arg.upper() == "MDMF":
elif arg.upper() == b"MDMF":
return "MDMF"
else:
raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % arg,

View File

@ -4,6 +4,8 @@ Common utilities that are available from Python 3.
Can eventually be merged back into allmydata.web.common.
"""
from past.builtins import unicode
from twisted.web import resource, http
from allmydata.util import abbreviate
@ -23,7 +25,13 @@ def get_arg(req, argname, default=None, multiple=False):
empty), starting with all those in the query args.
:param TahoeLAFSRequest req: The request to consider.
:return: Either bytes or tuple of bytes.
"""
if isinstance(argname, unicode):
argname = argname.encode("utf-8")
if isinstance(default, unicode):
default = default.encode("utf-8")
results = []
if argname in req.args:
results.extend(req.args[argname])
@ -62,6 +70,9 @@ class MultiFormatResource(resource.Resource, object):
:return: The result of the selected renderer.
"""
t = get_arg(req, self.formatArgument, self.formatDefault)
# It's either bytes or None.
if isinstance(t, bytes):
t = unicode(t, "ascii")
renderer = self._get_renderer(t)
return renderer(req)
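
get_arg keeps working in bytes, encoding any text argument names or defaults it is handed, and MultiFormatResource decodes the resulting ?t= value to ASCII before choosing a renderer. A small sketch that mimics that dispatch (pick_renderer is illustrative, not Tahoe code):

def pick_renderer(args, renderers, default=b"html"):
    """Read the bytes ?t= value, decode it, and look up the matching renderer."""
    t = args.get(b"t", [default])[0]
    if isinstance(t, bytes):           # it's either bytes or the default
        t = str(t, "ascii")
    try:
        return renderers[t]
    except KeyError:
        raise ValueError("Unknown t value: %r" % (t,))

render = pick_renderer({b"t": [b"json"]},
                       {"json": lambda: "{}", "html": lambda: "<html/>"})
assert render() == "{}"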

View File

@ -1,6 +1,6 @@
from past.builtins import unicode
import json
import urllib
from urllib.parse import quote as url_quote
from datetime import timedelta
from zope.interface import implementer
@ -20,7 +20,7 @@ from twisted.web.template import (
from hyperlink import URL
from twisted.python.filepath import FilePath
from allmydata.util import base32
from allmydata.util import base32, jsonbytes as json
from allmydata.util.encodingutil import (
to_bytes,
quote_output,
@ -109,7 +109,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
# or no further children) renders "this" page. We also need
# to reject "/uri/URI:DIR2:..//", so we look at postpath.
name = name.decode('utf8')
if not name and req.postpath != ['']:
if not name and req.postpath != [b'']:
return self
# Rejecting URIs that contain empty path pieces (for example:
@ -135,7 +135,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name
nonterminal = not terminal #len(req.postpath) > 0
t = get_arg(req, "t", "").strip()
t = get_arg(req, b"t", b"").strip()
if isinstance(node_or_failure, Failure):
f = node_or_failure
f.trap(NoSuchChildError)
@ -217,7 +217,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
@render_exception
def render_GET(self, req):
# This is where all of the directory-related ?t=* code goes.
t = get_arg(req, "t", "").strip()
t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
# t=info contains variable ophandles, t=rename-form contains the name
# of the child being renamed. Neither is allowed an ETag.
@ -225,7 +225,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
si = self.node.get_storage_index()
if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
return ""
return b""
if not t:
# render the directory as HTML
@ -255,7 +255,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
@render_exception
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
t = get_arg(req, b"t", b"").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
if t == "mkdir":
@ -275,7 +275,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
@render_exception
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
if t == "mkdir":
d = self._POST_mkdir(req)
@ -732,7 +732,7 @@ class DirectoryAsHTML(Element):
return ""
rocap = self.node.get_readonly_uri()
root = get_root(req)
uri_link = "%s/uri/%s/" % (root, urllib.quote(rocap))
uri_link = "%s/uri/%s/" % (root, url_quote(rocap))
return tag(tags.a("Read-Only Version", href=uri_link))
@renderer
@ -754,10 +754,10 @@ class DirectoryAsHTML(Element):
called by the 'children' renderer)
"""
name = name.encode("utf-8")
nameurl = urllib.quote(name, safe="") # encode any slashes too
nameurl = url_quote(name, safe="") # encode any slashes too
root = get_root(req)
here = "{}/uri/{}/".format(root, urllib.quote(self.node.get_uri()))
here = "{}/uri/{}/".format(root, url_quote(self.node.get_uri()))
if self.node.is_unknown() or self.node.is_readonly():
unlink = "-"
rename = "-"
@ -814,7 +814,7 @@ class DirectoryAsHTML(Element):
assert IFilesystemNode.providedBy(target), target
target_uri = target.get_uri() or ""
quoted_uri = urllib.quote(target_uri, safe="") # escape slashes too
quoted_uri = url_quote(target_uri, safe="") # escape slashes too
if IMutableFileNode.providedBy(target):
# to prevent javascript in displayed .html files from stealing a
@ -835,7 +835,7 @@ class DirectoryAsHTML(Element):
elif IDirectoryNode.providedBy(target):
# directory
uri_link = "%s/uri/%s/" % (root, urllib.quote(target_uri))
uri_link = "%s/uri/%s/" % (root, url_quote(target_uri))
slots["filename"] = tags.a(name, href=uri_link)
if not target.is_mutable():
dirtype = "DIR-IMM"
@ -871,7 +871,7 @@ class DirectoryAsHTML(Element):
slots["size"] = "-"
# use a directory-relative info link, so we can extract both the
# writecap and the readcap
info_link = "%s?t=info" % urllib.quote(name)
info_link = "%s?t=info" % url_quote(name)
if info_link:
slots["info"] = tags.a("More Info", href=info_link)
@ -888,7 +888,7 @@ class DirectoryAsHTML(Element):
# because action="." doesn't get us back to the dir page (but
# instead /uri itself)
root = get_root(req)
here = "{}/uri/{}/".format(root, urllib.quote(self.node.get_uri()))
here = "{}/uri/{}/".format(root, url_quote(self.node.get_uri()))
if self.node.is_readonly():
return tags.div("No upload forms: directory is read-only")
@ -1005,7 +1005,7 @@ def _directory_json_metadata(req, dirnode):
d = dirnode.list()
def _got(children):
kids = {}
for name, (childnode, metadata) in children.iteritems():
for name, (childnode, metadata) in children.items():
assert IFilesystemNode.providedBy(childnode), childnode
rw_uri = childnode.get_write_uri()
ro_uri = childnode.get_readonly_uri()
@ -1166,13 +1166,13 @@ def _cap_to_link(root, path, cap):
if isinstance(cap_obj, (CHKFileURI, WriteableSSKFileURI, ReadonlySSKFileURI)):
uri_link = root_url.child(
u"file",
u"{}".format(urllib.quote(cap)),
u"{}".format(urllib.quote(path[-1])),
u"{}".format(url_quote(cap)),
u"{}".format(url_quote(path[-1])),
)
else:
uri_link = root_url.child(
u"uri",
u"{}".format(urllib.quote(cap, safe="")),
u"{}".format(url_quote(cap, safe="")),
)
return tags.a(cap, href=uri_link.to_text())
else:
@ -1363,7 +1363,7 @@ class ManifestStreamer(dirnode.DeepStats):
j = json.dumps(d, ensure_ascii=True)
assert "\n" not in j
self.req.write(j+"\n")
self.req.write(j.encode("utf-8")+b"\n")
def finish(self):
stats = dirnode.DeepStats.get_results(self)
@ -1372,8 +1372,8 @@ class ManifestStreamer(dirnode.DeepStats):
}
j = json.dumps(d, ensure_ascii=True)
assert "\n" not in j
self.req.write(j+"\n")
return ""
self.req.write(j.encode("utf-8")+b"\n")
return b""
@implementer(IPushProducer)
class DeepCheckStreamer(dirnode.DeepStats):
@ -1441,7 +1441,7 @@ class DeepCheckStreamer(dirnode.DeepStats):
def write_line(self, data):
j = json.dumps(data, ensure_ascii=True)
assert "\n" not in j
self.req.write(j+"\n")
self.req.write(j.encode("utf-8")+b"\n")
def finish(self):
stats = dirnode.DeepStats.get_results(self)
@ -1450,8 +1450,8 @@ class DeepCheckStreamer(dirnode.DeepStats):
}
j = json.dumps(d, ensure_ascii=True)
assert "\n" not in j
self.req.write(j+"\n")
return ""
self.req.write(j.encode("utf-8")+b"\n")
return b""
class UnknownNodeHandler(Resource, object):
@ -1464,7 +1464,7 @@ class UnknownNodeHandler(Resource, object):
@render_exception
def render_GET(self, req):
t = get_arg(req, "t", "").strip()
t = unicode(get_arg(req, "t", "").strip(), "ascii")
if t == "info":
return MoreInfo(self.node)
if t == "json":

View File

@ -1,5 +1,4 @@
import json
from past.builtins import unicode, long
from twisted.web import http, static
from twisted.internet import defer
@ -41,6 +40,8 @@ from allmydata.web.check_results import (
LiteralCheckResultsRenderer,
)
from allmydata.web.info import MoreInfo
from allmydata.util import jsonbytes as json
class ReplaceMeMixin(object):
def replace_me_with_a_child(self, req, client, replace):
@ -117,7 +118,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
@render_exception
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
t = get_arg(req, b"t", b"").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
assert self.parentnode and self.name
@ -133,9 +134,9 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
@render_exception
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
replace = boolean_of_arg(get_arg(req, "replace", "true"))
if t == "upload":
t = get_arg(req, b"t", b"").strip()
replace = boolean_of_arg(get_arg(req, b"replace", b"true"))
if t == b"upload":
# like PUT, but get the file data from an HTML form's input field.
# We could get here from POST /uri/mutablefilecap?t=upload,
# or POST /uri/path/file?t=upload, or
@ -179,7 +180,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
@render_exception
def render_GET(self, req):
t = get_arg(req, "t", "").strip()
t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
# t=info contains variable ophandles, so is not allowed an ETag.
FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
@ -237,19 +238,19 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
@render_exception
def render_HEAD(self, req):
t = get_arg(req, "t", "").strip()
t = get_arg(req, b"t", b"").strip()
if t:
raise WebError("HEAD file: bad t=%s" % t)
filename = get_arg(req, "filename", self.name) or "unknown"
filename = get_arg(req, b"filename", self.name) or "unknown"
d = self.node.get_best_readable_version()
d.addCallback(lambda dn: FileDownloader(dn, filename))
return d
@render_exception
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
offset = parse_offset_arg(get_arg(req, "offset", None))
t = get_arg(req, b"t", b"").strip()
replace = parse_replace_arg(get_arg(req, b"replace", b"true"))
offset = parse_offset_arg(get_arg(req, b"offset", None))
if not t:
if not replace:
@ -290,11 +291,11 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
@render_exception
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
replace = boolean_of_arg(get_arg(req, "replace", "true"))
if t == "check":
t = get_arg(req, b"t", b"").strip()
replace = boolean_of_arg(get_arg(req, b"replace", b"true"))
if t == b"check":
d = self._POST_check(req)
elif t == "upload":
elif t == b"upload":
# like PUT, but get the file data from an HTML form's input field
# We could get here from POST /uri/mutablefilecap?t=upload,
# or POST /uri/path/file?t=upload, or

View File

@ -5,8 +5,7 @@ from twisted.web.template import Element, XMLFile, renderElement, renderer
from twisted.python.filepath import FilePath
from twisted.web import static
import allmydata
import json
from allmydata.util import idlib
from allmydata.util import idlib, jsonbytes as json
from allmydata.web.common import (
render_time,
MultiFormatResource,

View File

@ -1,6 +1,5 @@
import os
import time
import json
import urllib
from hyperlink import DecodedURL, URL
@ -21,7 +20,7 @@ from twisted.web.template import (
)
import allmydata # to display import path
from allmydata.util import log
from allmydata.util import log, jsonbytes as json
from allmydata.interfaces import IFileNode
from allmydata.web import (
filenode,
@ -158,7 +157,9 @@ class URIHandler(resource.Resource, object):
try:
node = self.client.create_node_from_uri(name)
return directory.make_handler_for(node, self.client)
except (TypeError, AssertionError):
except (TypeError, AssertionError) as e:
log.msg(format="Failed to parse cap, perhaps due to bug: %(e)s",
e=e, level=log.WEIRD)
raise WebError(
"'{}' is not a valid file- or directory- cap".format(name)
)
@ -226,7 +227,10 @@ class Root(MultiFormatResource):
self._client = client
self._now_fn = now_fn
self.putChild("uri", URIHandler(client))
# Children need to be bytes; for now just doing these to make specific
# tests pass on Python 3, but eventually will do all them when this
# module is ported to Python 3 (if not earlier).
self.putChild(b"uri", URIHandler(client))
self.putChild("cap", URIHandler(client))
# Handler for everything beneath "/private", an area of the resource
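
putChild(b"uri", ...) reflects that twisted.web routes requests by bytes path segments on Python 3; a child registered under a str name is simply never found by request routing. Illustrative only:

from twisted.web.resource import Resource

root = Resource()
child = Resource()
root.putChild(b"uri", child)              # path segments are bytes
assert root.children[b"uri"] is child     # registered under the bytes key
assert "uri" not in root.children         # a str name would never be matched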

View File

@ -3,7 +3,6 @@ from past.builtins import long, unicode
import pprint
import itertools
import hashlib
import json
from twisted.internet import defer
from twisted.python.filepath import FilePath
from twisted.web.resource import Resource
@ -14,7 +13,7 @@ from twisted.web.template import (
renderElement,
tags,
)
from allmydata.util import base32, idlib
from allmydata.util import base32, idlib, jsonbytes as json
from allmydata.web.common import (
abbreviate_time,
abbreviate_rate,

View File

@ -1,6 +1,6 @@
from future.utils import PY2
import time, json
import time
from twisted.python.filepath import FilePath
from twisted.web.template import (
Element,
@ -14,7 +14,7 @@ from allmydata.web.common_py3 import (
MultiFormatResource
)
from allmydata.util.abbreviate import abbreviate_space
from allmydata.util import time_format, idlib
from allmydata.util import time_format, idlib, jsonbytes as json
def remove_prefix(s, prefix):

View File

@ -128,7 +128,7 @@ def _logFormatter(logDateTime, request):
# sure we censor these too.
if queryargs.startswith(b"uri="):
queryargs = b"uri=[CENSORED]"
queryargs = "?" + queryargs
queryargs = b"?" + queryargs
if path.startswith(b"/uri/"):
path = b"/uri/[CENSORED]"
elif path.startswith(b"/file/"):