Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Synced 2025-04-26 13:59:59 +00:00
Merge remote-tracking branch 'origin/master' into 3579.ftp-python-3

commit 3a6f3f2809
Changed paths: newsfragments/, src/allmydata/
New newsfragment files (all empty):

newsfragments/3534.minor
newsfragments/3566.minor
newsfragments/3574.minor
newsfragments/3575.minor
newsfragments/3578.minor
@@ -34,10 +34,10 @@ class Blacklist(object):
         try:
             if self.last_mtime is None or current_mtime > self.last_mtime:
                 self.entries.clear()
-                with open(self.blacklist_fn, "r") as f:
+                with open(self.blacklist_fn, "rb") as f:
                     for line in f:
                         line = line.strip()
-                        if not line or line.startswith("#"):
+                        if not line or line.startswith(b"#"):
                             continue
                         si_s, reason = line.split(None, 1)
                         si = base32.a2b(si_s) # must be valid base32
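Note: once the blacklist is opened in "rb" mode, every literal it is compared against must be bytes. A self-contained sketch of the same parsing pattern on Python 3 (the storage index and reason below are made-up values):

    import io

    # Hypothetical blacklist contents: a comment, a blank line, one entry.
    data = io.BytesIO(b"# a comment\n\nmgl4kpoabhjyoplxkjs6rdc3geela7vc off-limits to you\n")

    entries = {}
    for line in data:
        line = line.strip()
        if not line or line.startswith(b"#"):  # bytes literal: comparing bytes to str is always False on Python 3
            continue
        si_s, reason = line.split(None, 1)     # split on whitespace, at most once
        entries[si_s] = reason

    assert entries == {b"mgl4kpoabhjyoplxkjs6rdc3geela7vc": b"off-limits to you"}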
@@ -1,4 +1,15 @@
-"""Implementation of the deep stats class."""
+"""Implementation of the deep stats class.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
 import math
@@ -13,7 +24,7 @@ from allmydata.util import mathutil
 class DeepStats(object):
     """Deep stats object.
 
-    Holds results of the deep-stats opetation.
+    Holds results of the deep-stats operation.
     Used for json generation in the API."""
 
     # Json API version.
@@ -121,7 +132,7 @@ class DeepStats(object):
             h[bucket] += 1
 
     def get_results(self):
-        """Returns deep-stats resutls."""
+        """Returns deep-stats results."""
         stats = self.stats.copy()
         for key in self.histograms:
             h = self.histograms[key]
@@ -18,7 +18,6 @@ import time
 from zope.interface import implementer
 from twisted.internet import defer
 from foolscap.api import fireEventually
-import json
 
 from allmydata.crypto import aes
 from allmydata.deep_stats import DeepStats
@@ -31,7 +30,7 @@ from allmydata.interfaces import IFilesystemNode, IDirectoryNode, IFileNode, \
 from allmydata.check_results import DeepCheckResults, \
      DeepCheckAndRepairResults
 from allmydata.monitor import Monitor
-from allmydata.util import hashutil, base32, log
+from allmydata.util import hashutil, base32, log, jsonbytes as json
 from allmydata.util.encodingutil import quote_output, normalize
 from allmydata.util.assertutil import precondition
 from allmydata.util.netstring import netstring, split_netstring
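The swap from stdlib json to jsonbytes suggests a drop-in wrapper that tolerates bytes, which stdlib json refuses. A minimal sketch of that technique (a generic illustration, not tahoe's actual jsonbytes implementation):

    import json

    class BytesJSONEncoder(json.JSONEncoder):
        """Encode bytes values as UTF-8 strings; defer everything else to the base class."""
        def default(self, o):
            if isinstance(o, bytes):
                return o.decode("utf-8")
            return json.JSONEncoder.default(self, o)

    def dumps(obj, **kwargs):
        return json.dumps(obj, cls=BytesJSONEncoder, **kwargs)

    print(dumps({"cap": b"URI:CHK:..."}))  # {"cap": "URI:CHK:..."}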
@@ -255,11 +255,11 @@ class Encoder(object):
             # captures the slot, not the value
             #d.addCallback(lambda res: self.do_segment(i))
             # use this form instead:
-            d.addCallback(lambda res, i=i: self._encode_segment(i))
+            d.addCallback(lambda res, i=i: self._encode_segment(i, is_tail=False))
             d.addCallback(self._send_segment, i)
             d.addCallback(self._turn_barrier)
         last_segnum = self.num_segments - 1
-        d.addCallback(lambda res: self._encode_tail_segment(last_segnum))
+        d.addCallback(lambda res: self._encode_segment(last_segnum, is_tail=True))
         d.addCallback(self._send_segment, last_segnum)
         d.addCallback(self._turn_barrier)
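The i=i default argument in these callbacks works around Python's late-binding closures: a plain lambda res: f(i) would see the final value of i by the time the Deferred fires. A self-contained illustration:

    # Late binding: every closure reads the same loop variable.
    callbacks = [lambda: i for i in range(3)]
    print([cb() for cb in callbacks])        # [2, 2, 2]

    # Default-argument trick: i=i captures the value at definition time.
    callbacks = [lambda i=i: i for i in range(3)]
    print([cb() for cb in callbacks])        # [0, 1, 2]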
@@ -317,8 +317,24 @@ class Encoder(object):
             dl.append(d)
         return self._gather_responses(dl)
 
-    def _encode_segment(self, segnum):
-        codec = self._codec
+    def _encode_segment(self, segnum, is_tail):
+        """
+        Encode one segment of input into the configured number of shares.
+
+        :param segnum: Ostensibly, the number of the segment to encode.  In
+            reality, this parameter is ignored and the *next* segment is
+            encoded and returned.
+
+        :param bool is_tail: ``True`` if this is the last segment, ``False``
+            otherwise.
+
+        :return: A ``Deferred`` which fires with a two-tuple.  The first
+            element is a list of string-y objects representing the encoded
+            segment data for one of the shares.  The second element is a list
+            of integers giving the share numbers of the shares in the first
+            element.
+        """
+        codec = self._tail_codec if is_tail else self._codec
         start = time.time()
 
         # the ICodecEncoder API wants to receive a total of self.segment_size
@@ -350,9 +366,11 @@ class Encoder(object):
         # footprint to 430KiB at the expense of more hash-tree overhead.
 
         d = self._gather_data(self.required_shares, input_piece_size,
-                              crypttext_segment_hasher)
+                              crypttext_segment_hasher, allow_short=is_tail)
         def _done_gathering(chunks):
             for c in chunks:
+                # If is_tail then a short trailing chunk will have been padded
+                # by _gather_data
                 assert len(c) == input_piece_size
             self._crypttext_hashes.append(crypttext_segment_hasher.digest())
         # during this call, we hit 5*segsize memory
@@ -365,31 +383,6 @@ class Encoder(object):
         d.addCallback(_done)
         return d
 
-    def _encode_tail_segment(self, segnum):
-
-        start = time.time()
-        codec = self._tail_codec
-        input_piece_size = codec.get_block_size()
-
-        crypttext_segment_hasher = hashutil.crypttext_segment_hasher()
-
-        d = self._gather_data(self.required_shares, input_piece_size,
-                              crypttext_segment_hasher, allow_short=True)
-        def _done_gathering(chunks):
-            for c in chunks:
-                # a short trailing chunk will have been padded by
-                # _gather_data
-                assert len(c) == input_piece_size
-            self._crypttext_hashes.append(crypttext_segment_hasher.digest())
-            return codec.encode(chunks)
-        d.addCallback(_done_gathering)
-        def _done(res):
-            elapsed = time.time() - start
-            self._times["cumulative_encoding"] += elapsed
-            return res
-        d.addCallback(_done)
-        return d
-
     def _gather_data(self, num_chunks, input_chunk_size,
                      crypttext_segment_hasher,
                      allow_short=False):
@@ -16,7 +16,7 @@ from six import ensure_text, ensure_str
 import time
 from zope.interface import implementer
 from twisted.application import service
-from foolscap.api import Referenceable, eventually
+from foolscap.api import Referenceable
 from allmydata.interfaces import InsufficientVersionError
 from allmydata.introducer.interfaces import IIntroducerClient, \
      RIIntroducerSubscriberClient_v2
@@ -24,6 +24,9 @@ from allmydata.introducer.common import sign_to_foolscap, unsign_from_foolscap,\
      get_tubid_string_from_ann
 from allmydata.util import log, yamlutil, connection_status
 from allmydata.util.rrefutil import add_version_to_remote_reference
+from allmydata.util.observer import (
+    ObserverList,
+)
 from allmydata.crypto.error import BadSignature
 from allmydata.util.assertutil import precondition
@@ -62,8 +65,7 @@ class IntroducerClient(service.Service, Referenceable):
         self._publisher = None
         self._since = None
 
-        self._local_subscribers = [] # (servicename,cb,args,kwargs) tuples
-        self._subscribed_service_names = set()
+        self._local_subscribers = {} # {servicename: ObserverList}
         self._subscriptions = set() # requests we've actually sent
 
         # _inbound_announcements remembers one announcement per
@@ -177,21 +179,21 @@ class IntroducerClient(service.Service, Referenceable):
         return log.msg(*args, **kwargs)
 
     def subscribe_to(self, service_name, cb, *args, **kwargs):
-        self._local_subscribers.append( (service_name,cb,args,kwargs) )
-        self._subscribed_service_names.add(service_name)
+        obs = self._local_subscribers.setdefault(service_name, ObserverList())
+        obs.subscribe(lambda key_s, ann: cb(key_s, ann, *args, **kwargs))
         self._maybe_subscribe()
         for index,(ann,key_s,when) in list(self._inbound_announcements.items()):
            precondition(isinstance(key_s, bytes), key_s)
            servicename = index[0]
            if servicename == service_name:
-                eventually(cb, key_s, ann, *args, **kwargs)
+                obs.notify(key_s, ann)
 
     def _maybe_subscribe(self):
         if not self._publisher:
             self.log("want to subscribe, but no introducer yet",
                      level=log.NOISY)
             return
-        for service_name in self._subscribed_service_names:
+        for service_name in self._local_subscribers:
             if service_name in self._subscriptions:
                 continue
             self._subscriptions.add(service_name)
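dict.setdefault keeps the per-service bookkeeping in one structure: the first subscriber for a service creates its ObserverList, later subscribers reuse it. The same pattern sketched with plain callable lists instead of ObserverList:

    subscribers = {}

    def subscribe_to(service_name, cb):
        subscribers.setdefault(service_name, []).append(cb)

    def deliver(service_name, *args):
        for cb in subscribers.get(service_name, []):
            cb(*args)

    subscribe_to("storage", lambda ann: print("got", ann))
    deliver("storage", {"nickname": "node1"})   # prints: got {'nickname': 'node1'}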
@@ -270,7 +272,7 @@ class IntroducerClient(service.Service, Referenceable):
         precondition(isinstance(key_s, bytes), key_s)
         self._debug_counts["inbound_announcement"] += 1
         service_name = str(ann["service-name"])
-        if service_name not in self._subscribed_service_names:
+        if service_name not in self._local_subscribers:
             self.log("announcement for a service we don't care about [%s]"
                      % (service_name,), level=log.UNUSUAL, umid="dIpGNA")
             self._debug_counts["wrong_service"] += 1
@@ -341,9 +343,9 @@ class IntroducerClient(service.Service, Referenceable):
     def _deliver_announcements(self, key_s, ann):
         precondition(isinstance(key_s, bytes), key_s)
         service_name = str(ann["service-name"])
-        for (service_name2,cb,args,kwargs) in self._local_subscribers:
-            if service_name2 == service_name:
-                eventually(cb, key_s, ann, *args, **kwargs)
+        obs = self._local_subscribers.get(service_name)
+        if obs is not None:
+            obs.notify(key_s, ann)
 
     def connection_status(self):
         assert self.running # startService builds _introducer_reconnector
@@ -1,5 +1,7 @@
 from __future__ import print_function
 
+from future.utils import bchr
+
 # do not import any allmydata modules at this level. Do that from inside
 # individual functions instead.
 import struct, time, os, sys
@@ -905,7 +907,7 @@ def corrupt_share(options):
         f = open(fn, "rb+")
         f.seek(offset)
         d = f.read(1)
-        d = chr(ord(d) ^ 0x01)
+        d = bchr(ord(d) ^ 0x01)
         f.seek(offset)
         f.write(d)
         f.close()
@@ -920,7 +922,7 @@ def corrupt_share(options):
         f.seek(m.DATA_OFFSET)
         data = f.read(2000)
         # make sure this slot contains an SMDF share
-        assert data[0] == b"\x00", "non-SDMF mutable shares not supported"
+        assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported"
         f.close()
 
         (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
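Both fixes above stem from the same Python 3 change: indexing bytes yields an int, not a one-byte string. Slicing keeps the bytes type, and future's bchr builds a one-byte bytes from an int on both Pythons. A quick demonstration:

    data = b"\x00abc"

    # Python 3: indexing bytes gives an int, slicing gives bytes.
    assert data[0] == 0          # int
    assert data[0:1] == b"\x00"  # bytes -- safe to compare against a bytes literal

    # One-byte bytes from an int (what future.utils.bchr provides portably):
    flipped = bytes([data[1] ^ 0x01])
    assert flipped == b"`"       # ord("a") ^ 1 == 0x60 == "`"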
@@ -1,11 +1,16 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 from __future__ import unicode_literals
 
-import time
-
-# Python 2 compatibility
 from future.utils import PY2
 if PY2:
-    from future.builtins import str  # noqa: F401
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import time
 
 from twisted.application import service
 from twisted.application.internet import TimerService
@@ -11,7 +11,7 @@ __all__ = [
     "skipIf",
 ]
 
-from past.builtins import chr as byteschr
+from past.builtins import chr as byteschr, unicode
 
 import os, random, struct
 import six
@@ -825,13 +825,18 @@ class WebErrorMixin(object):
                       code=None, substring=None, response_substring=None,
                       callable=None, *args, **kwargs):
         # returns a Deferred with the response body
-        assert substring is None or isinstance(substring, str)
+        if isinstance(substring, bytes):
+            substring = unicode(substring, "ascii")
+        if isinstance(response_substring, unicode):
+            response_substring = response_substring.encode("ascii")
+        assert substring is None or isinstance(substring, unicode)
+        assert response_substring is None or isinstance(response_substring, bytes)
         assert callable
         def _validate(f):
             if code is not None:
-                self.failUnlessEqual(f.value.status, str(code), which)
+                self.failUnlessEqual(f.value.status, b"%d" % code, which)
             if substring:
-                code_string = str(f)
+                code_string = unicode(f)
                 self.failUnless(substring in code_string,
                                 "%s: substring '%s' not in '%s'"
                                 % (which, substring, code_string))
@@ -1,7 +1,8 @@
 from __future__ import print_function
 
-from future.utils import PY2, native_str
+from future.utils import PY2, native_str, bchr, binary_type
 from future.builtins import str as future_str
+from past.builtins import unicode
 
 import os
 import time
@@ -20,9 +21,6 @@ from twisted.trial import unittest
 from ..util.assertutil import precondition
 from ..scripts import runner
 from allmydata.util.encodingutil import unicode_platform, get_filesystem_encoding, get_io_encoding
-# Imported for backwards compatibility:
-from future.utils import bord, bchr, binary_type
-from past.builtins import unicode
 
 
 def skip_if_cannot_represent_filename(u):
@@ -183,13 +181,12 @@ def insecurerandstr(n):
     return b''.join(map(bchr, map(randrange, [0]*n, [256]*n)))
 
 def flip_bit(good, which):
-    # TODO Probs need to update with bchr/bord as with flip_one_bit, below.
-    # flip the low-order bit of good[which]
+    """Flip the low-order bit of good[which]."""
     if which == -1:
-        pieces = good[:which], good[-1:], ""
+        pieces = good[:which], good[-1:], b""
     else:
         pieces = good[:which], good[which:which+1], good[which+1:]
-    return pieces[0] + chr(ord(pieces[1]) ^ 0x01) + pieces[2]
+    return pieces[0] + bchr(ord(pieces[1]) ^ 0x01) + pieces[2]
 
 def flip_one_bit(s, offset=0, size=None):
     """ flip one random bit of the string s, in a byte greater than or equal to offset and less
@@ -198,7 +195,7 @@ def flip_one_bit(s, offset=0, size=None):
     if size is None:
         size=len(s)-offset
     i = randrange(offset, offset+size)
-    result = s[:i] + bchr(bord(s[i])^(0x01<<randrange(0, 8))) + s[i+1:]
+    result = s[:i] + bchr(ord(s[i:i+1])^(0x01<<randrange(0, 8))) + s[i+1:]
     assert result != s, "Internal error -- flip_one_bit() produced the same string as its input: %s == %s" % (result, s)
     return result
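A quick check of the ported flip_bit logic, runnable standalone on Python 3 (bytes([n]) stands in here for future's bchr; slicing keeps the piece as bytes, so ord() still works on it):

    def flip_bit(good, which):
        """Flip the low-order bit of good[which] (bytes in, bytes out)."""
        if which == -1:
            pieces = good[:which], good[-1:], b""
        else:
            pieces = good[:which], good[which:which+1], good[which+1:]
        return pieces[0] + bytes([ord(pieces[1]) ^ 0x01]) + pieces[2]

    assert flip_bit(b"abc", 0) == b"`bc"    # 0x61 -> 0x60
    assert flip_bit(b"abc", -1) == b"abb"   # 0x63 -> 0x62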
@@ -24,6 +24,7 @@ from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from past.builtins import unicode
+from six import ensure_text
 
 import os
 from base64 import b32encode
@@ -614,8 +615,7 @@ class GridTestMixin(object):
                  method="GET", clientnum=0, **kwargs):
         # if return_response=True, this fires with (data, statuscode,
         # respheaders) instead of just data.
-        assert not isinstance(urlpath, unicode)
-        url = self.client_baseurls[clientnum] + urlpath
+        url = self.client_baseurls[clientnum] + ensure_text(urlpath)
 
         response = yield treq.request(method, url, persistent=False,
                                       allow_redirects=followRedirect,
@@ -173,7 +173,7 @@ class WebResultsRendering(unittest.TestCase):
         return c
 
     def render_json(self, resource):
-        return self.successResultOf(render(resource, {"output": ["json"]}))
+        return self.successResultOf(render(resource, {b"output": [b"json"]}))
 
     def render_element(self, element, args=None):
         if args is None:
@@ -186,7 +186,7 @@ class WebResultsRendering(unittest.TestCase):
         html = self.render_element(lcr)
         self.failUnlessIn(b"Literal files are always healthy", html)
 
-        html = self.render_element(lcr, args={"return_to": ["FOOURL"]})
+        html = self.render_element(lcr, args={b"return_to": [b"FOOURL"]})
         self.failUnlessIn(b"Literal files are always healthy", html)
         self.failUnlessIn(b'<a href="FOOURL">Return to file.</a>', html)
 
@@ -269,7 +269,7 @@ class WebResultsRendering(unittest.TestCase):
         self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
         self.failUnlessIn("Not Recoverable! : rather dead", s)
 
-        html = self.render_element(w, args={"return_to": ["FOOURL"]})
+        html = self.render_element(w, args={b"return_to": [b"FOOURL"]})
         self.failUnlessIn(b'<a href="FOOURL">Return to file/directory.</a>',
                           html)
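These keys and values turn into bytes because Twisted Web's request.args maps bytes keys to lists of bytes values on Python 3. A hedged sketch of what the test helpers are matching against, using Twisted's DummyRequest test helper:

    from twisted.web.test.requesthelper import DummyRequest

    # request.args for a query string like ?output=json
    req = DummyRequest([b""])
    req.args = {b"output": [b"json"]}
    assert req.args[b"output"] == [b"json"]   # a str key "output" would simply miss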
@@ -102,9 +102,35 @@ class HashUtilTests(unittest.TestCase):
         got_a = base32.b2a(got)
         self.failUnlessEqual(got_a, expected_a)
 
-    def test_known_answers(self):
-        # assert backwards compatibility
+    def test_storage_index_hash_known_answers(self):
+        """
+        Verify backwards compatibility by comparing ``storage_index_hash`` outputs
+        for some well-known (to us) inputs.
+        """
+        # This is a marginal case.  b"" is not a valid aes 128 key.  The
+        # implementation does nothing to avoid producing a result for it,
+        # though.
         self._testknown(hashutil.storage_index_hash, b"qb5igbhcc5esa6lwqorsy7e6am", b"")
+
+        # This is a little bit more realistic though clearly this is a poor key choice.
+        self._testknown(hashutil.storage_index_hash, b"wvggbrnrezdpa5yayrgiw5nzja", b"x" * 16)
+
+        # Here's a much more realistic key that I generated by reading some
+        # bytes from /dev/urandom.  I computed the expected hash value twice.
+        # First using hashlib.sha256 and then with sha256sum(1).  The input
+        # string given to the hash function was "43:<storage index tag>,<key>"
+        # in each case.
+        self._testknown(
+            hashutil.storage_index_hash,
+            b"aarbseqqrpsfowduchcjbonscq",
+            base32.a2b(b"2ckv3dfzh6rgjis6ogfqhyxnzy"),
+        )
+
+    def test_known_answers(self):
+        """
+        Verify backwards compatibility by comparing hash outputs for some
+        well-known (to us) inputs.
+        """
         self._testknown(hashutil.block_hash, b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"")
         self._testknown(hashutil.uri_extension_hash, b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"")
         self._testknown(hashutil.plaintext_hash, b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"")
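Known-answer tests pin a hash function's output for fixed inputs so a refactor (or a Python 3 port) can't silently change it. A minimal self-contained version of the _testknown pattern, using hashlib.sha256 in place of tahoe's tagged hashes (the expected constant is base32 of sha256 of the empty string):

    import base64, hashlib, unittest

    class KnownAnswerTest(unittest.TestCase):
        def _testknown(self, hashf, expected_b32, *args):
            got = hashf(*args)
            got_b32 = base64.b32encode(got).lower().rstrip(b"=")
            self.assertEqual(got_b32, expected_b32)

        def test_sha256_known_answer(self):
            # sha256(b"") is a fixed, well-known value.
            self._testknown(
                lambda data: hashlib.sha256(data).digest(),
                b"4oymiquy7qobjgx36tejs35zeqt24qpemsnzgtfeswmrw6csxbkq",
                b"",
            )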
@@ -15,7 +15,12 @@ from six import ensure_binary, ensure_text
 import os, re, itertools
 from base64 import b32decode
 import json
-from mock import Mock, patch
+from operator import (
+    setitem,
+)
+from functools import (
+    partial,
+)
 
 from testtools.matchers import (
     Is,
@@ -84,7 +89,8 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
 
     def test_introducer_clients_unloadable(self):
         """
-        Error if introducers.yaml exists but we can't read it
+        ``create_introducer_clients`` raises ``EnvironmentError`` if
+        ``introducers.yaml`` exists but we can't read it.
         """
         basedir = u"introducer.IntroducerNode.test_introducer_clients_unloadable"
         os.mkdir(basedir)
@@ -94,17 +100,10 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
             f.write(u'---\n')
         os.chmod(yaml_fname, 0o000)
         self.addCleanup(lambda: os.chmod(yaml_fname, 0o700))
-        # just mocking the yaml failure, as "yamlutil.safe_load" only
-        # returns None on some platforms for unreadable files
-
-        with patch("allmydata.client.yamlutil") as p:
-            p.safe_load = Mock(return_value=None)
-
-            fake_tub = Mock()
-            config = read_config(basedir, "portnum")
-
-            with self.assertRaises(EnvironmentError):
-                create_introducer_clients(config, fake_tub)
+        config = read_config(basedir, "portnum")
+        with self.assertRaises(EnvironmentError):
+            create_introducer_clients(config, Tub())
 
     @defer.inlineCallbacks
     def test_furl(self):
@@ -1037,23 +1036,53 @@ class Signatures(SyncTestCase):
             unsign_from_foolscap, (bad_msg, sig, b"v999-key"))
 
     def test_unsigned_announcement(self):
-        ed25519.verifying_key_from_string(b"pub-v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
-        mock_tub = Mock()
+        """
+        An incorrectly signed announcement is not delivered to subscribers.
+        """
+        private_key, public_key = ed25519.create_signing_keypair()
+        public_key_str = ed25519.string_from_verifying_key(public_key)
+
         ic = IntroducerClient(
-            mock_tub,
+            Tub(),
             "pb://",
             u"fake_nick",
             "0.0.0",
             "1.2.3",
             (0, u"i am a nonce"),
             "invalid",
+            FilePath(self.mktemp()),
         )
-        self.assertEqual(0, ic._debug_counts["inbound_announcement"])
-        ic.got_announcements([
-            (b"message", b"v0-aaaaaaa", b"v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
-        ])
-        # we should have rejected this announcement due to a bad signature
-        self.assertEqual(0, ic._debug_counts["inbound_announcement"])
+        received = {}
+        ic.subscribe_to("good-stuff", partial(setitem, received))
+
+        # Deliver a good message to prove our test code is valid.
+        ann = {"service-name": "good-stuff", "payload": "hello"}
+        ann_t = sign_to_foolscap(ann, private_key)
+        ic.got_announcements([ann_t])
+
+        self.assertEqual(
+            {public_key_str[len("pub-"):]: ann},
+            received,
+        )
+        received.clear()
+
+        # Now deliver one without a valid signature and observe that it isn't
+        # delivered to the subscriber.
+        ann = {"service-name": "good-stuff", "payload": "bad stuff"}
+        (msg, sig, key) = sign_to_foolscap(ann, private_key)
+        # Drop a base32 word from the middle of the key to invalidate the
+        # signature.
+        sig_a = bytearray(sig)
+        sig_a[20:22] = []
+        sig = bytes(sig_a)
+        ann_t = (msg, sig, key)
+        ic.got_announcements([ann_t])
+
+        # The received announcements dict should remain empty because we
+        # should not receive the announcement with the invalid signature.
+        self.assertEqual(
+            {},
+            received,
+        )
 
 
 # add tests of StorageFarmBroker: if it receives duplicate announcements, it
@@ -101,3 +101,56 @@ class Observer(unittest.TestCase):
         d.addCallback(_step2)
         d.addCallback(_check2)
         return d
+
+    def test_observer_list_reentrant(self):
+        """
+        ``ObserverList`` is reentrant.
+        """
+        observed = []
+
+        def observer_one():
+            obs.unsubscribe(observer_one)
+
+        def observer_two():
+            observed.append(None)
+
+        obs = observer.ObserverList()
+        obs.subscribe(observer_one)
+        obs.subscribe(observer_two)
+        obs.notify()
+
+        self.assertEqual([None], observed)
+
+    def test_observer_list_observer_errors(self):
+        """
+        An error in an earlier observer does not prevent notification from being
+        delivered to a later observer.
+        """
+        observed = []
+
+        def observer_one():
+            raise Exception("Some problem here")
+
+        def observer_two():
+            observed.append(None)
+
+        obs = observer.ObserverList()
+        obs.subscribe(observer_one)
+        obs.subscribe(observer_two)
+        obs.notify()
+
+        self.assertEqual([None], observed)
+        self.assertEqual(1, len(self.flushLoggedErrors(Exception)))
+
+    def test_observer_list_propagate_keyboardinterrupt(self):
+        """
+        ``KeyboardInterrupt`` escapes ``ObserverList.notify``.
+        """
+        def observer_one():
+            raise KeyboardInterrupt()
+
+        obs = observer.ObserverList()
+        obs.subscribe(observer_one)
+
+        with self.assertRaises(KeyboardInterrupt):
+            obs.notify()
@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from twisted.application import service
@@ -70,7 +70,7 @@ def renderJSON(resource):
     """
     Render a JSON from the given resource.
     """
-    return render(resource, {"t": ["json"]})
+    return render(resource, {b"t": [b"json"]})
 
 class MyBucketCountingCrawler(BucketCountingCrawler):
     def finished_prefix(self, cycle, prefix):
@@ -1,6 +1,17 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import print_function
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import unicode_literals
 
-import os.path, re, urllib
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import os.path, re
+from urllib.parse import quote as url_quote
 import json
 from six.moves import StringIO
@@ -37,7 +48,7 @@ DIR_HTML_TAG = '<html lang="en">'
 class CompletelyUnhandledError(Exception):
     pass
 
-class ErrorBoom(object, resource.Resource):
+class ErrorBoom(resource.Resource, object):
     @render_exception
     def render(self, req):
         raise CompletelyUnhandledError("whoops")
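The base-class swap matters because listing object before another new-style base breaks method resolution order on Python 3 (where Resource is itself a subclass of object): Python cannot linearize a hierarchy where object precedes one of its own subclasses. A quick reproduction:

    class Base(object):
        pass

    try:
        class Broken(object, Base):   # object listed before a subclass of object
            pass
    except TypeError as e:
        print(e)   # Cannot create a consistent method resolution order (MRO) ...

    class Fine(Base, object):         # subclass first: linearizes cleanly
        pass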
@@ -47,32 +58,38 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
     def CHECK(self, ign, which, args, clientnum=0):
         fileurl = self.fileurls[which]
         url = fileurl + "?" + args
-        return self.GET(url, method="POST", clientnum=clientnum)
+        return self.GET_unicode(url, method="POST", clientnum=clientnum)
+
+    def GET_unicode(self, *args, **kwargs):
+        """Send an HTTP request, but convert result to Unicode string."""
+        d = GridTestMixin.GET(self, *args, **kwargs)
+        d.addCallback(str, "utf-8")
+        return d
 
     def test_filecheck(self):
         self.basedir = "web/Grid/filecheck"
         self.set_up_grid()
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA, convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA, convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "good")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"1", convergence="")))
+                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
         d.addCallback(_stash_uri, "sick")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"2", convergence="")))
+                      c0.upload(upload.Data(DATA+b"2", convergence=b"")))
         d.addCallback(_stash_uri, "dead")
         def _stash_mutable_uri(n, which):
             self.uris[which] = n.get_uri()
-            assert isinstance(self.uris[which], str)
+            assert isinstance(self.uris[which], bytes)
         d.addCallback(lambda ign:
-                      c0.create_mutable_file(publish.MutableData(DATA+"3")))
+                      c0.create_mutable_file(publish.MutableData(DATA+b"3")))
         d.addCallback(_stash_mutable_uri, "corrupt")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data("literal", convergence="")))
+                      c0.upload(upload.Data(b"literal", convergence=b"")))
         d.addCallback(_stash_uri, "small")
         d.addCallback(lambda ign: c0.create_immutable_dirnode({}))
         d.addCallback(_stash_mutable_uri, "smalldir")
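GET_unicode leans on Deferred.addCallback passing extra positional arguments through: addCallback(str, "utf-8") calls str(result, "utf-8"), i.e. it decodes the bytes body. A standalone illustration:

    from twisted.internet import defer

    d = defer.succeed(b"<html>ok</html>")
    d.addCallback(str, "utf-8")          # fires with str(b"<html>ok</html>", "utf-8")
    d.addCallback(print)                 # prints: <html>ok</html>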
@@ -80,7 +97,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)
 
         def _clobber_shares(ignored):
@@ -203,28 +220,28 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.set_up_grid()
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA, convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA, convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "good")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"1", convergence="")))
+                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
         d.addCallback(_stash_uri, "sick")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"2", convergence="")))
+                      c0.upload(upload.Data(DATA+b"2", convergence=b"")))
         d.addCallback(_stash_uri, "dead")
         def _stash_mutable_uri(n, which):
             self.uris[which] = n.get_uri()
-            assert isinstance(self.uris[which], str)
+            assert isinstance(self.uris[which], bytes)
         d.addCallback(lambda ign:
-                      c0.create_mutable_file(publish.MutableData(DATA+"3")))
+                      c0.create_mutable_file(publish.MutableData(DATA+b"3")))
         d.addCallback(_stash_mutable_uri, "corrupt")
 
         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)
 
         def _clobber_shares(ignored):
@@ -286,8 +303,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.set_up_grid()
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA+"1", convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA+b"1", convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "sick")
@@ -295,7 +312,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)
 
         def _clobber_shares(ignored):
@@ -329,7 +346,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.fileurls = {}
 
         # the future cap format may contain slashes, which must be tolerated
-        expected_info_url = "uri/%s?t=info" % urllib.quote(unknown_rwcap,
+        expected_info_url = "uri/%s?t=info" % url_quote(unknown_rwcap,
                                                            safe="")
 
         if immutable:
@@ -343,8 +360,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 
         def _stash_root_and_create_file(n):
             self.rootnode = n
-            self.rooturl = "uri/" + urllib.quote(n.get_uri())
-            self.rourl = "uri/" + urllib.quote(n.get_readonly_uri())
+            self.rooturl = "uri/" + url_quote(n.get_uri())
+            self.rourl = "uri/" + url_quote(n.get_readonly_uri())
             if not immutable:
                 return self.rootnode.set_node(name, future_node)
         d.addCallback(_stash_root_and_create_file)
@@ -352,18 +369,19 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         # make sure directory listing tolerates unknown nodes
         d.addCallback(lambda ign: self.GET(self.rooturl))
         def _check_directory_html(res, expected_type_suffix):
-            pattern = re.compile(r'<td>\?%s</td>[ \t\n\r]*'
-                                 '<td>%s</td>' % (expected_type_suffix, str(name)),
+            pattern = re.compile(br'<td>\?%s</td>[ \t\n\r]*'
+                                 b'<td>%s</td>' % (
+                                     expected_type_suffix, name.encode("ascii")),
                                  re.DOTALL)
             self.failUnless(re.search(pattern, res), res)
             # find the More Info link for name, should be relative
-            mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
+            mo = re.search(br'<a href="([^"]+)">More Info</a>', res)
             info_url = mo.group(1)
-            self.failUnlessReallyEqual(info_url, "%s?t=info" % (str(name),))
+            self.failUnlessReallyEqual(info_url, b"%s?t=info" % (name.encode("ascii"),))
         if immutable:
-            d.addCallback(_check_directory_html, "-IMM")
+            d.addCallback(_check_directory_html, b"-IMM")
         else:
-            d.addCallback(_check_directory_html, "")
+            d.addCallback(_check_directory_html, b"")
 
         d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
         def _check_directory_json(res, expect_rw_uri):
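Bytes regular expressions are their own type on Python 3: a bytes pattern only matches bytes, and %-formatting has to stay within one type. A compact demonstration of the pattern used above:

    import re

    body = b"<td>?-IMM</td>\n<td>unknown</td>"
    pattern = re.compile(br'<td>\?%s</td>[ \t\n\r]*<td>%s</td>' % (b"-IMM", b"unknown"),
                         re.DOTALL)
    assert re.search(pattern, body)            # bytes pattern, bytes haystack
    # re.search(r'<td>', body) would raise TypeError: cannot use a string pattern
    # on a bytes-like object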
@@ -383,7 +401,6 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
 
         def _check_info(res, expect_rw_uri, expect_ro_uri):
-            self.failUnlessIn("Object Type: <span>unknown</span>", res)
             if expect_rw_uri:
                 self.failUnlessIn(unknown_rwcap, res)
             if expect_ro_uri:
@@ -393,6 +410,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                     self.failUnlessIn(unknown_rocap, res)
                 else:
                     self.failIfIn(unknown_rocap, res)
+            res = str(res, "utf-8")
+            self.failUnlessIn("Object Type: <span>unknown</span>", res)
             self.failIfIn("Raw data as", res)
             self.failIfIn("Directory writecap", res)
             self.failIfIn("Checker Operations", res)
|
||||
d.addCallback(lambda ign: self.GET(expected_info_url))
|
||||
d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
|
||||
d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, str(name))))
|
||||
d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, name)))
|
||||
d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
|
||||
|
||||
def _check_json(res, expect_rw_uri):
|
||||
@ -436,9 +455,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
|
||||
# or not future_node was immutable.
|
||||
d.addCallback(lambda ign: self.GET(self.rourl))
|
||||
if immutable:
|
||||
d.addCallback(_check_directory_html, "-IMM")
|
||||
d.addCallback(_check_directory_html, b"-IMM")
|
||||
else:
|
||||
d.addCallback(_check_directory_html, "-RO")
|
||||
d.addCallback(_check_directory_html, b"-RO")
|
||||
|
||||
d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
|
||||
d.addCallback(_check_directory_json, expect_rw_uri=False)
|
||||
@@ -462,9 +481,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.uris = {}
         self.fileurls = {}
 
-        lonely_uri = "URI:LIT:n5xgk" # LIT for "one"
-        mut_write_uri = "URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
-        mut_read_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
+        lonely_uri = b"URI:LIT:n5xgk" # LIT for "one"
+        mut_write_uri = b"URI:SSK:vfvcbdfbszyrsaxchgevhmmlii:euw4iw7bbnkrrwpzuburbhppuxhc3gwxv26f6imekhz7zyw2ojnq"
+        mut_read_uri = b"URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
 
         # This method tests mainly dirnode, but we'd have to duplicate code in order to
         # test the dirnode and web layers separately.
@@ -507,10 +526,10 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             rep = str(dn)
             self.failUnlessIn("RO-IMM", rep)
             cap = dn.get_cap()
-            self.failUnlessIn("CHK", cap.to_string())
+            self.failUnlessIn(b"CHK", cap.to_string())
             self.cap = cap
             self.rootnode = dn
-            self.rooturl = "uri/" + urllib.quote(dn.get_uri())
+            self.rooturl = "uri/" + url_quote(dn.get_uri())
             return download_to_data(dn._node)
         d.addCallback(_created)
 
@@ -526,7 +545,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             entry = entries[0]
             (name_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
             name = name_utf8.decode("utf-8")
-            self.failUnlessEqual(rwcapdata, "")
+            self.failUnlessEqual(rwcapdata, b"")
             self.failUnlessIn(name, kids)
             (expected_child, ign) = kids[name]
             self.failUnlessReallyEqual(ro_uri, expected_child.get_readonly_uri())
@@ -553,13 +572,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(lambda ign: self.GET(self.rooturl))
         def _check_html(res):
             soup = BeautifulSoup(res, 'html5lib')
-            self.failIfIn("URI:SSK", res)
+            self.failIfIn(b"URI:SSK", res)
             found = False
             for td in soup.find_all(u"td"):
                 if td.text != u"FILE":
                     continue
                 a = td.findNextSibling()(u"a")[0]
-                self.assertIn(urllib.quote(lonely_uri), a[u"href"])
+                self.assertIn(url_quote(lonely_uri), a[u"href"])
                 self.assertEqual(u"lonely", a.text)
                 self.assertEqual(a[u"rel"], [u"noreferrer"])
                 self.assertEqual(u"{}".format(len("one")), td.findNextSibling().findNextSibling().text)
@@ -573,7 +592,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                 if a.text == u"More Info"
             )
             self.assertEqual(1, len(infos))
-            self.assertTrue(infos[0].endswith(urllib.quote(lonely_uri) + "?t=info"))
+            self.assertTrue(infos[0].endswith(url_quote(lonely_uri) + "?t=info"))
         d.addCallback(_check_html)
 
         # ... and in JSON.
@@ -596,12 +615,12 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         c0 = self.g.clients[0]
         self.uris = {}
         self.fileurls = {}
-        DATA = "data" * 100
+        DATA = b"data" * 100
         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
-            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
-            return n.add_file(u"good", upload.Data(DATA, convergence=""))
+            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
+            return n.add_file(u"good", upload.Data(DATA, convergence=b""))
         d.addCallback(_stash_root_and_create_file)
         def _stash_uri(fn, which):
             self.uris[which] = fn.get_uri()
@@ -609,13 +628,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(_stash_uri, "good")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"small",
-                                             upload.Data("literal",
-                                                         convergence="")))
+                                             upload.Data(b"literal",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "small")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"sick",
-                                             upload.Data(DATA+"1",
-                                                         convergence="")))
+                                             upload.Data(DATA+b"1",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "sick")
 
         # this tests that deep-check and stream-manifest will ignore
@@ -695,13 +714,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         d.addCallback(_stash_uri, "subdir")
         d.addCallback(lambda subdir_node:
                       subdir_node.add_file(u"grandchild",
-                                           upload.Data(DATA+"2",
-                                                       convergence="")))
+                                           upload.Data(DATA+b"2",
+                                                       convergence=b"")))
         d.addCallback(_stash_uri, "grandchild")
 
         d.addCallback(lambda ign:
                       self.delete_shares_numbered(self.uris["subdir"],
-                                                  range(1, 10)))
+                                                  list(range(1, 10))))
 
         # root
         # root/good
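The range() calls get wrapped in list() because Python 3's range is a lazy sequence, not a list, and code that compares or serializes it behaves differently. A quick check:

    r = range(1, 10)
    print(r)              # range(1, 10) -- a lazy sequence on Python 3, not a list
    print(list(r))        # [1, 2, 3, 4, 5, 6, 7, 8, 9]
    print(r == [1, 2, 3, 4, 5, 6, 7, 8, 9])        # False: different types
    print(list(r) == [1, 2, 3, 4, 5, 6, 7, 8, 9])  # True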
@@ -770,30 +789,30 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         c0 = self.g.clients[0]
         self.uris = {}
         self.fileurls = {}
-        DATA = "data" * 100
+        DATA = b"data" * 100
         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
-            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
-            return n.add_file(u"good", upload.Data(DATA, convergence=""))
+            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
+            return n.add_file(u"good", upload.Data(DATA, convergence=b""))
         d.addCallback(_stash_root_and_create_file)
         def _stash_uri(fn, which):
             self.uris[which] = fn.get_uri()
         d.addCallback(_stash_uri, "good")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"small",
-                                             upload.Data("literal",
-                                                         convergence="")))
+                                             upload.Data(b"literal",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "small")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"sick",
-                                             upload.Data(DATA+"1",
-                                                         convergence="")))
+                                             upload.Data(DATA+b"1",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "sick")
         #d.addCallback(lambda ign:
         #              self.rootnode.add_file(u"dead",
-        #                                     upload.Data(DATA+"2",
-        #                                                 convergence="")))
+        #                                     upload.Data(DATA+b"2",
+        #                                                 convergence=b"")))
         #d.addCallback(_stash_uri, "dead")
 
         #d.addCallback(lambda ign: c0.create_mutable_file("mutable"))
@@ -888,25 +907,25 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         self.set_up_grid(num_clients=2, oneshare=True)
         c0 = self.g.clients[0]
         self.uris = {}
-        DATA = "data" * 100
-        d = c0.upload(upload.Data(DATA, convergence=""))
+        DATA = b"data" * 100
+        d = c0.upload(upload.Data(DATA, convergence=b""))
         def _stash_uri(ur, which):
             self.uris[which] = ur.get_uri()
         d.addCallback(_stash_uri, "one")
         d.addCallback(lambda ign:
-                      c0.upload(upload.Data(DATA+"1", convergence="")))
+                      c0.upload(upload.Data(DATA+b"1", convergence=b"")))
         d.addCallback(_stash_uri, "two")
         def _stash_mutable_uri(n, which):
             self.uris[which] = n.get_uri()
-            assert isinstance(self.uris[which], str)
+            assert isinstance(self.uris[which], bytes)
         d.addCallback(lambda ign:
-                      c0.create_mutable_file(publish.MutableData(DATA+"2")))
+                      c0.create_mutable_file(publish.MutableData(DATA+b"2")))
         d.addCallback(_stash_mutable_uri, "mutable")
 
         def _compute_fileurls(ignored):
             self.fileurls = {}
             for which in self.uris:
-                self.fileurls[which] = "uri/" + urllib.quote(self.uris[which])
+                self.fileurls[which] = "uri/" + url_quote(self.uris[which])
         d.addCallback(_compute_fileurls)
 
         d.addCallback(self._count_leases, "one")
@@ -982,25 +1001,25 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         c0 = self.g.clients[0]
         self.uris = {}
         self.fileurls = {}
-        DATA = "data" * 100
+        DATA = b"data" * 100
         d = c0.create_dirnode()
         def _stash_root_and_create_file(n):
             self.rootnode = n
             self.uris["root"] = n.get_uri()
-            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
-            return n.add_file(u"one", upload.Data(DATA, convergence=""))
+            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
+            return n.add_file(u"one", upload.Data(DATA, convergence=b""))
         d.addCallback(_stash_root_and_create_file)
         def _stash_uri(fn, which):
             self.uris[which] = fn.get_uri()
         d.addCallback(_stash_uri, "one")
         d.addCallback(lambda ign:
                       self.rootnode.add_file(u"small",
-                                             upload.Data("literal",
-                                                         convergence="")))
+                                             upload.Data(b"literal",
+                                                         convergence=b"")))
         d.addCallback(_stash_uri, "small")
 
         d.addCallback(lambda ign:
-                      c0.create_mutable_file(publish.MutableData("mutable")))
+                      c0.create_mutable_file(publish.MutableData(b"mutable")))
         d.addCallback(lambda fn: self.rootnode.set_node(u"mutable", fn))
         d.addCallback(_stash_uri, "mutable")
@@ -1051,36 +1070,36 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         c0 = self.g.clients[0]
         c0.encoding_params['happy'] = 2
         self.fileurls = {}
-        DATA = "data" * 100
+        DATA = b"data" * 100
         d = c0.create_dirnode()
         def _stash_root(n):
-            self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
+            self.fileurls["root"] = "uri/" + url_quote(n.get_uri())
             self.fileurls["imaginary"] = self.fileurls["root"] + "/imaginary"
             return n
         d.addCallback(_stash_root)
-        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
+        d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence=b"")))
         def _stash_bad(ur):
-            self.fileurls["1share"] = "uri/" + urllib.quote(ur.get_uri())
-            self.delete_shares_numbered(ur.get_uri(), range(1,10))
+            self.fileurls["1share"] = "uri/" + url_quote(ur.get_uri())
+            self.delete_shares_numbered(ur.get_uri(), list(range(1,10)))
 
             u = uri.from_string(ur.get_uri())
             u.key = testutil.flip_bit(u.key, 0)
             baduri = u.to_string()
-            self.fileurls["0shares"] = "uri/" + urllib.quote(baduri)
+            self.fileurls["0shares"] = "uri/" + url_quote(baduri)
         d.addCallback(_stash_bad)
         d.addCallback(lambda ign: c0.create_dirnode())
         def _mangle_dirnode_1share(n):
             u = n.get_uri()
-            url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u)
+            url = self.fileurls["dir-1share"] = "uri/" + url_quote(u)
             self.fileurls["dir-1share-json"] = url + "?t=json"
-            self.delete_shares_numbered(u, range(1,10))
+            self.delete_shares_numbered(u, list(range(1,10)))
         d.addCallback(_mangle_dirnode_1share)
         d.addCallback(lambda ign: c0.create_dirnode())
         def _mangle_dirnode_0share(n):
             u = n.get_uri()
-            url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u)
+            url = self.fileurls["dir-0share"] = "uri/" + url_quote(u)
             self.fileurls["dir-0share-json"] = url + "?t=json"
-            self.delete_shares_numbered(u, range(0,10))
+            self.delete_shares_numbered(u, list(range(0,10)))
         d.addCallback(_mangle_dirnode_0share)
 
         # NotEnoughSharesError should be reported sensibly, with a
@@ -1092,6 +1111,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            410, "Gone", "NoSharesError",
                                            self.GET, self.fileurls["0shares"]))
         def _check_zero_shares(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             body = " ".join(body.strip().split())
             exp = ("NoSharesError: no shares could be found. "
@@ -1100,7 +1120,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                    "severe corruption. You should perform a filecheck on "
                    "this object to learn more. The full error message is: "
                    "no shares (need 3). Last failure: None")
-            self.failUnlessReallyEqual(exp, body)
+            self.assertEqual(exp, body)
         d.addCallback(_check_zero_shares)
@@ -1109,6 +1129,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            410, "Gone", "NotEnoughSharesError",
                                            self.GET, self.fileurls["1share"]))
         def _check_one_share(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             body = " ".join(body.strip().split())
             msgbase = ("NotEnoughSharesError: This indicates that some "
@@ -1133,10 +1154,11 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            404, "Not Found", None,
                                            self.GET, self.fileurls["imaginary"]))
         def _missing_child(body):
+            body = str(body, "utf-8")
             self.failUnlessIn("No such child: imaginary", body)
         d.addCallback(_missing_child)
 
-        d.addCallback(lambda ignored: self.GET(self.fileurls["dir-0share"]))
+        d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-0share"]))
         def _check_0shares_dir_html(body):
             self.failUnlessIn(DIR_HTML_TAG, body)
             # we should see the regular page, but without the child table or
|
||||
self.failUnlessIn("No upload forms: directory is unreadable", body)
|
||||
d.addCallback(_check_0shares_dir_html)
|
||||
|
||||
d.addCallback(lambda ignored: self.GET(self.fileurls["dir-1share"]))
|
||||
d.addCallback(lambda ignored: self.GET_unicode(self.fileurls["dir-1share"]))
|
||||
def _check_1shares_dir_html(body):
|
||||
# at some point, we'll split UnrecoverableFileError into 0-shares
|
||||
# and some-shares like we did for immutable files (since there
|
||||
@@ -1182,6 +1204,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET,
                                            self.fileurls["dir-0share-json"]))
         def _check_unrecoverable_file(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             body = " ".join(body.strip().split())
             exp = ("UnrecoverableFileError: the directory (or mutable file) "
@@ -1209,7 +1232,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         # attach a webapi child that throws a random error, to test how it
         # gets rendered.
         w = c0.getServiceNamed("webish")
-        w.root.putChild("ERRORBOOM", ErrorBoom())
+        w.root.putChild(b"ERRORBOOM", ErrorBoom())
 
         # "Accept: */*" : should get a text/html stack trace
         # "Accept: text/plain" : should get a text/plain stack trace
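Twisted Web resource paths are bytes on Python 3, so putChild needs a bytes segment; with a str key the child would be registered under a name no request ever matches. A hedged minimal sketch:

    from twisted.web.resource import Resource

    class Hello(Resource):
        isLeaf = True
        def render_GET(self, request):
            return b"hello"               # render_* must also return bytes

    root = Resource()
    root.putChild(b"hello", Hello())      # bytes path segment; "hello" (str) would never match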
@@ -1222,6 +1245,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, "ERRORBOOM",
                                            headers={"accept": "*/*"}))
         def _internal_error_html1(body):
+            body = str(body, "utf-8")
             self.failUnlessIn("<html>", "expected HTML, not '%s'" % body)
         d.addCallback(_internal_error_html1)
 
@@ -1231,6 +1255,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, "ERRORBOOM",
                                            headers={"accept": "text/plain"}))
         def _internal_error_text2(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             self.failUnless(body.startswith("Traceback "), body)
         d.addCallback(_internal_error_text2)
@@ -1242,6 +1267,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, "ERRORBOOM",
                                            headers={"accept": CLI_accepts}))
         def _internal_error_text3(body):
+            body = str(body, "utf-8")
             self.failIfIn("<html>", body)
             self.failUnless(body.startswith("Traceback "), body)
         d.addCallback(_internal_error_text3)
@@ -1251,7 +1277,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            500, "Internal Server Error", None,
                                            self.GET, "ERRORBOOM"))
         def _internal_error_html4(body):
-            self.failUnlessIn("<html>", body)
+            self.failUnlessIn(b"<html>", body)
         d.addCallback(_internal_error_html4)
 
         def _flush_errors(res):
|
||||
c0 = self.g.clients[0]
|
||||
fn = c0.config.get_config_path("access.blacklist")
|
||||
self.uris = {}
|
||||
DATA = "off-limits " * 50
|
||||
DATA = b"off-limits " * 50
|
||||
|
||||
d = c0.upload(upload.Data(DATA, convergence=""))
|
||||
d = c0.upload(upload.Data(DATA, convergence=b""))
|
||||
def _stash_uri_and_create_dir(ur):
|
||||
self.uri = ur.get_uri()
|
||||
self.url = "uri/"+self.uri
|
||||
self.url = b"uri/"+self.uri
|
||||
u = uri.from_string_filenode(self.uri)
|
||||
self.si = u.get_storage_index()
|
||||
childnode = c0.create_node_from_uri(self.uri, None)
|
||||
@@ -1283,9 +1309,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
         def _stash_dir(node):
             self.dir_node = node
             self.dir_uri = node.get_uri()
-            self.dir_url = "uri/"+self.dir_uri
+            self.dir_url = b"uri/"+self.dir_uri
         d.addCallback(_stash_dir)
-        d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
+        d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
         def _check_dir_html(body):
             self.failUnlessIn(DIR_HTML_TAG, body)
             self.failUnlessIn("blacklisted.txt</a>", body)
@@ -1298,7 +1324,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
             f.write(" # this is a comment\n")
             f.write(" \n")
             f.write("\n") # also exercise blank lines
-            f.write("%s %s\n" % (base32.b2a(self.si), "off-limits to you"))
+            f.write("%s off-limits to you\n" % (str(base32.b2a(self.si), "ascii"),))
             f.close()
             # clients should be checking the blacklist each time, so we don't
             # need to restart the client
@@ -1309,14 +1335,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
                                            self.GET, self.url))
 
         # We should still be able to list the parent directory, in HTML...
-        d.addCallback(lambda ign: self.GET(self.dir_url, followRedirect=True))
+        d.addCallback(lambda ign: self.GET_unicode(self.dir_url, followRedirect=True))
        def _check_dir_html2(body):
            self.failUnlessIn(DIR_HTML_TAG, body)
            self.failUnlessIn("blacklisted.txt</strike>", body)
        d.addCallback(_check_dir_html2)

        # ... and in JSON (used by CLI).
-        d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True))
+        d.addCallback(lambda ign: self.GET(self.dir_url+b"?t=json", followRedirect=True))
         def _check_dir_json(res):
             data = json.loads(res)
             self.failUnless(isinstance(data, list), data)
|
||||
d.addCallback(_add_dir)
|
||||
def _get_dircap(dn):
|
||||
self.dir_si_b32 = base32.b2a(dn.get_storage_index())
|
||||
self.dir_url_base = "uri/"+dn.get_write_uri()
|
||||
self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json"
|
||||
self.dir_url_json2 = "uri/"+dn.get_write_uri()+"?t=json"
|
||||
self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"?t=json"
|
||||
self.child_url = "uri/"+dn.get_readonly_uri()+"/child"
|
||||
self.dir_url_base = b"uri/"+dn.get_write_uri()
|
||||
self.dir_url_json1 = b"uri/"+dn.get_write_uri()+b"?t=json"
|
||||
self.dir_url_json2 = b"uri/"+dn.get_write_uri()+b"?t=json"
|
||||
self.dir_url_json_ro = b"uri/"+dn.get_readonly_uri()+b"?t=json"
|
||||
self.child_url = b"uri/"+dn.get_readonly_uri()+b"/child"
|
||||
d.addCallback(_get_dircap)
|
||||
d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
|
||||
d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, body))
|
||||
d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, str(body, "utf-8")))
|
||||
d.addCallback(lambda ign: self.GET(self.dir_url_json1))
|
||||
d.addCallback(lambda res: json.loads(res)) # just check it decodes
|
||||
d.addCallback(lambda ign: self.GET(self.dir_url_json2))
|
||||
@ -1373,8 +1399,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
|
||||
d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
|
||||
|
||||
def _block_dir(ign):
|
||||
f = open(fn, "w")
|
||||
f.write("%s %s\n" % (self.dir_si_b32, "dir-off-limits to you"))
|
||||
f = open(fn, "wb")
|
||||
f.write(b"%s %s\n" % (self.dir_si_b32, b"dir-off-limits to you"))
|
||||
f.close()
|
||||
self.g.clients[0].blacklist.last_mtime -= 2.0
|
||||
d.addCallback(_block_dir)
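The _block_dir change relies on %-formatting of bytes (PEP 461), available on Python 3.5 and newer; every interpolated value must itself be bytes. A small sketch with a made-up storage-index value:

si_b32 = b"aaaaaaaaaaaaaaaaaaaaaaaaaa"            # made-up base32 value
entry = b"%s %s\n" % (si_b32, b"dir-off-limits to you")
assert entry == si_b32 + b" dir-off-limits to you\n"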
@ -746,7 +746,10 @@ class MultiFormatResourceTests(TrialTestCase):
            "<title>400 - Bad Format</title>", response_body,
        )
        self.assertIn(
            "Unknown t value: 'foo'", response_body,
            "Unknown t value:", response_body,
        )
        self.assertIn(
            "'foo'", response_body,
        )
@ -34,6 +34,7 @@ PORTED_MODULES = [
    "allmydata.crypto.error",
    "allmydata.crypto.rsa",
    "allmydata.crypto.util",
    "allmydata.deep_stats",
    "allmydata.dirnode",
    "allmydata.frontends.ftpd",
    "allmydata.hashtree",
@ -70,6 +71,7 @@ PORTED_MODULES = [
    "allmydata.mutable.servermap",
    "allmydata.node",
    "allmydata.nodemaker",
    "allmydata.stats",
    "allmydata.storage_client",
    "allmydata.storage.common",
    "allmydata.storage.crawler",
@ -168,6 +170,7 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_repairer",
    "allmydata.test.test_spans",
    "allmydata.test.test_statistics",
    "allmydata.test.test_stats",
    "allmydata.test.test_storage",
    "allmydata.test.test_storage_client",
    "allmydata.test.test_storage_web",
@ -182,6 +185,7 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_uri",
    "allmydata.test.test_util",
    "allmydata.test.web.test_common",
    "allmydata.test.web.test_grid",
    "allmydata.test.web.test_util",
    "allmydata.test.web.test_status",
]
@ -142,7 +142,9 @@ def a2b(cs):
    # Add padding back, to make Python's base64 module happy:
    while (len(cs) * 5) % 8 != 0:
        cs += b"="
    return base64.b32decode(cs)
    # Let newbytes come through and still work on Python 2, where the base64
    # module gets confused by them.
    return base64.b32decode(backwardscompat_bytes(cs))


__all__ = ["b2a", "a2b", "b2a_or_none", "BASE32CHAR_3bits", "BASE32CHAR_1bits", "BASE32CHAR", "BASE32STR_anybytes", "could_be_base32_encoded"]
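The padding loop restores what the encoder strips: base32 carries 5 bits per character, and base64.b32decode() insists on a length that is a multiple of 8 characters. A self-contained sketch of just that rule:

import base64

cs = b"MFRGG"                     # unpadded base32 for b"abc"
while (len(cs) * 5) % 8 != 0:
    cs += b"="
assert cs == b"MFRGG==="
assert base64.b32decode(cs) == b"abc"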
@ -16,6 +16,9 @@ if PY2:
import weakref
from twisted.internet import defer
from foolscap.api import eventually
from twisted.logger import (
    Logger,
)

"""The idiom we use is for the observed object to offer a method named
'when_something', which returns a deferred. That deferred will be fired when
@ -97,7 +100,10 @@ class LazyOneShotObserverList(OneShotObserverList):
        self._fire(self._get_result())

class ObserverList(object):
    """A simple class to distribute events to a number of subscribers."""
    """
    Immediately distribute events to a number of subscribers.
    """
    _logger = Logger()

    def __init__(self):
        self._watchers = []
@ -109,8 +115,11 @@ class ObserverList(object):
        self._watchers.remove(observer)

    def notify(self, *args, **kwargs):
        for o in self._watchers:
            eventually(o, *args, **kwargs)
        for o in self._watchers[:]:
            try:
                o(*args, **kwargs)
            except Exception:
                self._logger.failure("While notifying {o!r}", o=o)

class EventStreamObserver(object):
    """A simple class to distribute multiple events to a single subscriber.
@ -1,4 +1,5 @@
from past.builtins import unicode
from six import ensure_text, ensure_str

import time
import json
@ -99,17 +100,19 @@ def get_filenode_metadata(filenode):

def boolean_of_arg(arg):
    # TODO: ""
    arg = ensure_text(arg)
    if arg.lower() not in ("true", "t", "1", "false", "f", "0", "on", "off"):
        raise WebError("invalid boolean argument: %r" % (arg,), http.BAD_REQUEST)
    return arg.lower() in ("true", "t", "1", "on")

def parse_replace_arg(replace):
    replace = ensure_text(replace)
    if replace.lower() == "only-files":
        return replace
    try:
        return boolean_of_arg(replace)
    except WebError:
        raise WebError("invalid replace= argument: %r" % (replace,), http.BAD_REQUEST)
        raise WebError("invalid replace= argument: %r" % (ensure_str(replace),), http.BAD_REQUEST)


def get_format(req, default="CHK"):
@ -118,11 +121,11 @@ def get_format(req, default="CHK"):
        if boolean_of_arg(get_arg(req, "mutable", "false")):
            return "SDMF"
        return default
    if arg.upper() == "CHK":
    if arg.upper() == b"CHK":
        return "CHK"
    elif arg.upper() == "SDMF":
    elif arg.upper() == b"SDMF":
        return "SDMF"
    elif arg.upper() == "MDMF":
    elif arg.upper() == b"MDMF":
        return "MDMF"
    else:
        raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % arg,
@ -4,6 +4,8 @@ Common utilities that are available from Python 3.
Can eventually be merged back into allmydata.web.common.
"""

from past.builtins import unicode

from twisted.web import resource, http

from allmydata.util import abbreviate
@ -23,7 +25,13 @@ def get_arg(req, argname, default=None, multiple=False):
    empty), starting with all those in the query args.

    :param TahoeLAFSRequest req: The request to consider.

    :return: Either bytes or tuple of bytes.
    """
    if isinstance(argname, unicode):
        argname = argname.encode("utf-8")
    if isinstance(default, unicode):
        default = default.encode("utf-8")
    results = []
    if argname in req.args:
        results.extend(req.args[argname])
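The added isinstance checks mean callers may pass argname and default as either text or bytes; both are normalized to bytes before the lookup, and the result is bytes. A hedged sketch, where FakeRequest is a made-up stand-in for TahoeLAFSRequest:

from allmydata.web.common_py3 import get_arg

class FakeRequest(object):
    args = {b"t": [b"json"]}       # query args, keyed by bytes
    fields = None                  # no form fields

req = FakeRequest()
assert get_arg(req, "t", "") == b"json"
assert get_arg(req, b"t", b"") == b"json"
assert get_arg(req, "missing", "fallback") == b"fallback"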
@ -62,6 +70,9 @@ class MultiFormatResource(resource.Resource, object):
        :return: The result of the selected renderer.
        """
        t = get_arg(req, self.formatArgument, self.formatDefault)
        # It's either bytes or None.
        if isinstance(t, bytes):
            t = unicode(t, "ascii")
        renderer = self._get_renderer(t)
        return renderer(req)
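Decoding t to text keeps the renderer lookup working on Python 3, where get_arg() now returns bytes. A sketch of a consumer, assuming the render_<FORMAT> naming convention this class dispatches on (StatusPage is hypothetical):

from allmydata.web.common_py3 import MultiFormatResource

class StatusPage(MultiFormatResource):   # hypothetical subclass
    formatArgument = "t"

    def render_HTML(self, req):          # the default, e.g. bare GET /status
        return b"<html>status</html>"

    def render_JSON(self, req):          # selected by GET /status?t=json
        return b"{}"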
@ -1,6 +1,6 @@
from past.builtins import unicode

import json
import urllib
from urllib.parse import quote as url_quote
from datetime import timedelta

from zope.interface import implementer
@ -20,7 +20,7 @@ from twisted.web.template import (
from hyperlink import URL
from twisted.python.filepath import FilePath

from allmydata.util import base32
from allmydata.util import base32, jsonbytes as json
from allmydata.util.encodingutil import (
    to_bytes,
    quote_output,
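Importing jsonbytes as json keeps every existing json.dumps() call site unchanged while tolerating the bytes values that show up during the bytes/str transition. A minimal sketch of what such a drop-in needs to do (the real allmydata.util.jsonbytes may differ):

import json as _json

class _BytesEncoder(_json.JSONEncoder):
    """Encode bytes values as UTF-8 text instead of raising TypeError."""
    def default(self, o):
        if isinstance(o, bytes):
            return o.decode("utf-8")
        return _json.JSONEncoder.default(self, o)

def dumps(obj, *args, **kwargs):
    kwargs.setdefault("cls", _BytesEncoder)
    return _json.dumps(obj, *args, **kwargs)

# dumps({"cap": b"URI:DIR2:xyz"}) == '{"cap": "URI:DIR2:xyz"}'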
@ -109,7 +109,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        # or no further children) renders "this" page. We also need
        # to reject "/uri/URI:DIR2:..//", so we look at postpath.
        name = name.decode('utf8')
        if not name and req.postpath != ['']:
        if not name and req.postpath != [b'']:
            return self

        # Rejecting URIs that contain empty path pieces (for example:
@ -135,7 +135,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name
        nonterminal = not terminal #len(req.postpath) > 0

        t = get_arg(req, "t", "").strip()
        t = get_arg(req, b"t", b"").strip()
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
@ -217,7 +217,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
    @render_exception
    def render_GET(self, req):
        # This is where all of the directory-related ?t=* code goes.
        t = get_arg(req, "t", "").strip()
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")

        # t=info contains variable ophandles, t=rename-form contains the name
        # of the child being renamed. Neither is allowed an ETag.
@ -225,7 +225,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
            si = self.node.get_storage_index()
            if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
                return ""
                return b""

        if not t:
            # render the directory as HTML
@ -255,7 +255,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

    @render_exception
    def render_PUT(self, req):
        t = get_arg(req, "t", "").strip()
        t = get_arg(req, b"t", b"").strip()
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        if t == "mkdir":
@ -275,7 +275,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

    @render_exception
    def render_POST(self, req):
        t = get_arg(req, "t", "").strip()
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")

        if t == "mkdir":
            d = self._POST_mkdir(req)
@ -732,7 +732,7 @@ class DirectoryAsHTML(Element):
            return ""
        rocap = self.node.get_readonly_uri()
        root = get_root(req)
        uri_link = "%s/uri/%s/" % (root, urllib.quote(rocap))
        uri_link = "%s/uri/%s/" % (root, url_quote(rocap))
        return tag(tags.a("Read-Only Version", href=uri_link))

    @renderer
@ -754,10 +754,10 @@ class DirectoryAsHTML(Element):
        called by the 'children' renderer)
        """
        name = name.encode("utf-8")
        nameurl = urllib.quote(name, safe="") # encode any slashes too
        nameurl = url_quote(name, safe="") # encode any slashes too

        root = get_root(req)
        here = "{}/uri/{}/".format(root, urllib.quote(self.node.get_uri()))
        here = "{}/uri/{}/".format(root, url_quote(self.node.get_uri()))
        if self.node.is_unknown() or self.node.is_readonly():
            unlink = "-"
            rename = "-"
@ -814,7 +814,7 @@ class DirectoryAsHTML(Element):

        assert IFilesystemNode.providedBy(target), target
        target_uri = target.get_uri() or ""
        quoted_uri = urllib.quote(target_uri, safe="") # escape slashes too
        quoted_uri = url_quote(target_uri, safe="") # escape slashes too

        if IMutableFileNode.providedBy(target):
            # to prevent javascript in displayed .html files from stealing a
@ -835,7 +835,7 @@ class DirectoryAsHTML(Element):

        elif IDirectoryNode.providedBy(target):
            # directory
            uri_link = "%s/uri/%s/" % (root, urllib.quote(target_uri))
            uri_link = "%s/uri/%s/" % (root, url_quote(target_uri))
            slots["filename"] = tags.a(name, href=uri_link)
            if not target.is_mutable():
                dirtype = "DIR-IMM"
@ -871,7 +871,7 @@ class DirectoryAsHTML(Element):
            slots["size"] = "-"
        # use a directory-relative info link, so we can extract both the
        # writecap and the readcap
        info_link = "%s?t=info" % urllib.quote(name)
        info_link = "%s?t=info" % url_quote(name)

        if info_link:
            slots["info"] = tags.a("More Info", href=info_link)
@ -888,7 +888,7 @@ class DirectoryAsHTML(Element):
        # because action="." doesn't get us back to the dir page (but
        # instead /uri itself)
        root = get_root(req)
        here = "{}/uri/{}/".format(root, urllib.quote(self.node.get_uri()))
        here = "{}/uri/{}/".format(root, url_quote(self.node.get_uri()))

        if self.node.is_readonly():
            return tags.div("No upload forms: directory is read-only")
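All of these call sites move from Python 2's urllib.quote to urllib.parse.quote via the url_quote alias imported above. The safe="" variant matters when a cap must fit into a single path segment:

from urllib.parse import quote as url_quote

cap = "URI:DIR2:abc/123"                           # made-up cap
assert url_quote(cap) == "URI%3ADIR2%3Aabc/123"    # "/" kept by default
assert url_quote(cap, safe="") == "URI%3ADIR2%3Aabc%2F123"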
@ -1005,7 +1005,7 @@ def _directory_json_metadata(req, dirnode):
    d = dirnode.list()
    def _got(children):
        kids = {}
        for name, (childnode, metadata) in children.iteritems():
        for name, (childnode, metadata) in children.items():
            assert IFilesystemNode.providedBy(childnode), childnode
            rw_uri = childnode.get_write_uri()
            ro_uri = childnode.get_readonly_uri()
@ -1166,13 +1166,13 @@ def _cap_to_link(root, path, cap):
        if isinstance(cap_obj, (CHKFileURI, WriteableSSKFileURI, ReadonlySSKFileURI)):
            uri_link = root_url.child(
                u"file",
                u"{}".format(urllib.quote(cap)),
                u"{}".format(urllib.quote(path[-1])),
                u"{}".format(url_quote(cap)),
                u"{}".format(url_quote(path[-1])),
            )
        else:
            uri_link = root_url.child(
                u"uri",
                u"{}".format(urllib.quote(cap, safe="")),
                u"{}".format(url_quote(cap, safe="")),
            )
        return tags.a(cap, href=uri_link.to_text())
    else:
@ -1363,7 +1363,7 @@ class ManifestStreamer(dirnode.DeepStats):

        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        self.req.write(j.encode("utf-8")+b"\n")

    def finish(self):
        stats = dirnode.DeepStats.get_results(self)
@ -1372,8 +1372,8 @@ class ManifestStreamer(dirnode.DeepStats):
        }
        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        return ""
        self.req.write(j.encode("utf-8")+b"\n")
        return b""

@implementer(IPushProducer)
class DeepCheckStreamer(dirnode.DeepStats):
@ -1441,7 +1441,7 @@ class DeepCheckStreamer(dirnode.DeepStats):
    def write_line(self, data):
        j = json.dumps(data, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        self.req.write(j.encode("utf-8")+b"\n")

    def finish(self):
        stats = dirnode.DeepStats.get_results(self)
@ -1450,8 +1450,8 @@ class DeepCheckStreamer(dirnode.DeepStats):
        }
        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        self.req.write(j+"\n")
        return ""
        self.req.write(j.encode("utf-8")+b"\n")
        return b""


class UnknownNodeHandler(Resource, object):
@ -1464,7 +1464,7 @@ class UnknownNodeHandler(Resource, object):

    @render_exception
    def render_GET(self, req):
        t = get_arg(req, "t", "").strip()
        t = unicode(get_arg(req, "t", "").strip(), "ascii")
        if t == "info":
            return MoreInfo(self.node)
        if t == "json":
@ -1,5 +1,4 @@

import json
from past.builtins import unicode, long

from twisted.web import http, static
from twisted.internet import defer
@ -41,6 +40,8 @@ from allmydata.web.check_results import (
    LiteralCheckResultsRenderer,
)
from allmydata.web.info import MoreInfo
from allmydata.util import jsonbytes as json


class ReplaceMeMixin(object):
    def replace_me_with_a_child(self, req, client, replace):
@ -117,7 +118,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):

    @render_exception
    def render_PUT(self, req):
        t = get_arg(req, "t", "").strip()
        t = get_arg(req, b"t", b"").strip()
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        assert self.parentnode and self.name
@ -133,9 +134,9 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):

    @render_exception
    def render_POST(self, req):
        t = get_arg(req, "t", "").strip()
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        if t == "upload":
        t = get_arg(req, b"t", b"").strip()
        replace = boolean_of_arg(get_arg(req, b"replace", b"true"))
        if t == b"upload":
            # like PUT, but get the file data from an HTML form's input field.
            # We could get here from POST /uri/mutablefilecap?t=upload,
            # or POST /uri/path/file?t=upload, or
@ -179,7 +180,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):

    @render_exception
    def render_GET(self, req):
        t = get_arg(req, "t", "").strip()
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")

        # t=info contains variable ophandles, so is not allowed an ETag.
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
@ -237,19 +238,19 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):

    @render_exception
    def render_HEAD(self, req):
        t = get_arg(req, "t", "").strip()
        t = get_arg(req, b"t", b"").strip()
        if t:
            raise WebError("HEAD file: bad t=%s" % t)
        filename = get_arg(req, "filename", self.name) or "unknown"
        filename = get_arg(req, b"filename", self.name) or "unknown"
        d = self.node.get_best_readable_version()
        d.addCallback(lambda dn: FileDownloader(dn, filename))
        return d

    @render_exception
    def render_PUT(self, req):
        t = get_arg(req, "t", "").strip()
        replace = parse_replace_arg(get_arg(req, "replace", "true"))
        offset = parse_offset_arg(get_arg(req, "offset", None))
        t = get_arg(req, b"t", b"").strip()
        replace = parse_replace_arg(get_arg(req, b"replace", b"true"))
        offset = parse_offset_arg(get_arg(req, b"offset", None))

        if not t:
            if not replace:
@ -290,11 +291,11 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):

    @render_exception
    def render_POST(self, req):
        t = get_arg(req, "t", "").strip()
        replace = boolean_of_arg(get_arg(req, "replace", "true"))
        if t == "check":
        t = get_arg(req, b"t", b"").strip()
        replace = boolean_of_arg(get_arg(req, b"replace", b"true"))
        if t == b"check":
            d = self._POST_check(req)
        elif t == "upload":
        elif t == b"upload":
            # like PUT, but get the file data from an HTML form's input field
            # We could get here from POST /uri/mutablefilecap?t=upload,
            # or POST /uri/path/file?t=upload, or
@ -5,8 +5,7 @@ from twisted.web.template import Element, XMLFile, renderElement, renderer
from twisted.python.filepath import FilePath
from twisted.web import static
import allmydata
import json
from allmydata.util import idlib
from allmydata.util import idlib, jsonbytes as json
from allmydata.web.common import (
    render_time,
    MultiFormatResource,

@ -1,6 +1,5 @@
import os
import time
import json
import urllib

from hyperlink import DecodedURL, URL
@ -21,7 +20,7 @@ from twisted.web.template import (
)

import allmydata # to display import path
from allmydata.util import log
from allmydata.util import log, jsonbytes as json
from allmydata.interfaces import IFileNode
from allmydata.web import (
    filenode,
@ -158,7 +157,9 @@ class URIHandler(resource.Resource, object):
        try:
            node = self.client.create_node_from_uri(name)
            return directory.make_handler_for(node, self.client)
        except (TypeError, AssertionError):
        except (TypeError, AssertionError) as e:
            log.msg(format="Failed to parse cap, perhaps due to bug: %(e)s",
                    e=e, level=log.WEIRD)
            raise WebError(
                "'{}' is not a valid file- or directory- cap".format(name)
            )
@ -226,7 +227,10 @@ class Root(MultiFormatResource):
        self._client = client
        self._now_fn = now_fn

        self.putChild("uri", URIHandler(client))
        # Children need to be bytes; for now just doing these to make specific
        # tests pass on Python 3, but eventually will do all them when this
        # module is ported to Python 3 (if not earlier).
        self.putChild(b"uri", URIHandler(client))
        self.putChild("cap", URIHandler(client))

        # Handler for everything beneath "/private", an area of the resource
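The comment reflects a twisted.web rule: on Python 3, URL segments are matched against child names as bytes, so a child registered under a text name is simply never found. A sketch:

from twisted.web.resource import Resource

root = Resource()
root.putChild(b"uri", Resource())    # reachable at /uri
# root.putChild("cap", Resource())   # keyed by str, never matched on Python 3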
@ -3,7 +3,6 @@ from past.builtins import long, unicode
import pprint
import itertools
import hashlib
import json
from twisted.internet import defer
from twisted.python.filepath import FilePath
from twisted.web.resource import Resource
@ -14,7 +13,7 @@ from twisted.web.template import (
    renderElement,
    tags,
)
from allmydata.util import base32, idlib
from allmydata.util import base32, idlib, jsonbytes as json
from allmydata.web.common import (
    abbreviate_time,
    abbreviate_rate,

@ -1,6 +1,6 @@
from future.utils import PY2

import time, json
import time
from twisted.python.filepath import FilePath
from twisted.web.template import (
    Element,
@ -14,7 +14,7 @@ from allmydata.web.common_py3 import (
    MultiFormatResource
)
from allmydata.util.abbreviate import abbreviate_space
from allmydata.util import time_format, idlib
from allmydata.util import time_format, idlib, jsonbytes as json


def remove_prefix(s, prefix):

@ -128,7 +128,7 @@ def _logFormatter(logDateTime, request):
    # sure we censor these too.
    if queryargs.startswith(b"uri="):
        queryargs = b"uri=[CENSORED]"
    queryargs = "?" + queryargs
    queryargs = b"?" + queryargs
    if path.startswith(b"/uri/"):
        path = b"/uri/[CENSORED]"
    elif path.startswith(b"/file/"):