Make sure we can handle bytes, plus a couple other fixes.

This commit is contained in:
Itamar Turner-Trauring 2020-12-21 13:12:01 -05:00
parent 2737229895
commit c25dd57768
7 changed files with 10 additions and 13 deletions

View File

@ -18,7 +18,6 @@ import time
from zope.interface import implementer from zope.interface import implementer
from twisted.internet import defer from twisted.internet import defer
from foolscap.api import fireEventually from foolscap.api import fireEventually
import json
from allmydata.crypto import aes from allmydata.crypto import aes
from allmydata.deep_stats import DeepStats from allmydata.deep_stats import DeepStats
@ -31,7 +30,7 @@ from allmydata.interfaces import IFilesystemNode, IDirectoryNode, IFileNode, \
from allmydata.check_results import DeepCheckResults, \ from allmydata.check_results import DeepCheckResults, \
DeepCheckAndRepairResults DeepCheckAndRepairResults
from allmydata.monitor import Monitor from allmydata.monitor import Monitor
from allmydata.util import hashutil, base32, log from allmydata.util import hashutil, base32, log, jsonbytes as json
from allmydata.util.encodingutil import quote_output, normalize from allmydata.util.encodingutil import quote_output, normalize
from allmydata.util.assertutil import precondition from allmydata.util.assertutil import precondition
from allmydata.util.netstring import netstring, split_netstring from allmydata.util.netstring import netstring, split_netstring

View File

@ -1,5 +1,7 @@
from __future__ import print_function from __future__ import print_function
from future.utils import bchr
# do not import any allmydata modules at this level. Do that from inside # do not import any allmydata modules at this level. Do that from inside
# individual functions instead. # individual functions instead.
import struct, time, os, sys import struct, time, os, sys
@ -905,7 +907,7 @@ def corrupt_share(options):
f = open(fn, "rb+") f = open(fn, "rb+")
f.seek(offset) f.seek(offset)
d = f.read(1) d = f.read(1)
d = chr(ord(d) ^ 0x01) d = bchr(ord(d) ^ 0x01)
f.seek(offset) f.seek(offset)
f.write(d) f.write(d)
f.close() f.close()
@ -920,7 +922,7 @@ def corrupt_share(options):
f.seek(m.DATA_OFFSET) f.seek(m.DATA_OFFSET)
data = f.read(2000) data = f.read(2000)
# make sure this slot contains an SMDF share # make sure this slot contains an SMDF share
assert data[0] == b"\x00", "non-SDMF mutable shares not supported" assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported"
f.close() f.close()
(version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize, (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,

View File

@ -109,7 +109,6 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMixin
d.addCallback(self.CHECK, "good", "t=check") d.addCallback(self.CHECK, "good", "t=check")
def _got_html_good(res): def _got_html_good(res):
res = unicode(res, "utf-8")
self.failUnlessIn("Healthy", res) self.failUnlessIn("Healthy", res)
self.failIfIn("Not Healthy", res) self.failIfIn("Not Healthy", res)
soup = BeautifulSoup(res, 'html5lib') soup = BeautifulSoup(res, 'html5lib')
@ -118,7 +117,6 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(_got_html_good) d.addCallback(_got_html_good)
d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere") d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
def _got_html_good_return_to(res): def _got_html_good_return_to(res):
res = unicode(res, "utf-8")
self.failUnlessIn("Healthy", res) self.failUnlessIn("Healthy", res)
self.failIfIn("Not Healthy", res) self.failIfIn("Not Healthy", res)
self.failUnlessIn('<a href="somewhere">Return to file', res) self.failUnlessIn('<a href="somewhere">Return to file', res)

View File

@ -5,8 +5,7 @@ from twisted.web.template import Element, XMLFile, renderElement, renderer
from twisted.python.filepath import FilePath from twisted.python.filepath import FilePath
from twisted.web import static from twisted.web import static
import allmydata import allmydata
import json from allmydata.util import idlib, jsonbytes as json
from allmydata.util import idlib
from allmydata.web.common import ( from allmydata.web.common import (
render_time, render_time,
MultiFormatResource, MultiFormatResource,

View File

@ -3,7 +3,6 @@ from past.builtins import long, unicode
import pprint import pprint
import itertools import itertools
import hashlib import hashlib
import json
from twisted.internet import defer from twisted.internet import defer
from twisted.python.filepath import FilePath from twisted.python.filepath import FilePath
from twisted.web.resource import Resource from twisted.web.resource import Resource
@ -14,7 +13,7 @@ from twisted.web.template import (
renderElement, renderElement,
tags, tags,
) )
from allmydata.util import base32, idlib from allmydata.util import base32, idlib, jsonbytes as json
from allmydata.web.common import ( from allmydata.web.common import (
abbreviate_time, abbreviate_time,
abbreviate_rate, abbreviate_rate,

View File

@ -1,6 +1,6 @@
from future.utils import PY2 from future.utils import PY2
import time, json import time
from twisted.python.filepath import FilePath from twisted.python.filepath import FilePath
from twisted.web.template import ( from twisted.web.template import (
Element, Element,
@ -14,7 +14,7 @@ from allmydata.web.common_py3 import (
MultiFormatResource MultiFormatResource
) )
from allmydata.util.abbreviate import abbreviate_space from allmydata.util.abbreviate import abbreviate_space
from allmydata.util import time_format, idlib from allmydata.util import time_format, idlib, jsonbytes as json
def remove_prefix(s, prefix): def remove_prefix(s, prefix):

View File

@ -110,7 +110,7 @@ def _get_client_ip(request):
def _logFormatter(logDateTime, request): def _logFormatter(logDateTime, request):
print("REQUEST: {}".format(request.uri)) print("REQUEST: {} {}".format(request.method, request.uri))
# we build up a log string that hides most of the cap, to preserve # we build up a log string that hides most of the cap, to preserve
# user privacy. We retain the query args so we can identify things # user privacy. We retain the query args so we can identify things
# like t=json. Then we send it to the flog. We make no attempt to # like t=json. Then we send it to the flog. We make no attempt to