Merge branch 'master' into 3394.py38-tests

commit d74d7e733c
Jason R. Coombs, 2020-09-09 21:42:32 -04:00

40 changed files with 778 additions and 704 deletions

@@ -211,7 +211,8 @@ jobs:
     environment:
       <<: *UTF_8_ENVIRONMENT
-      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27-coverage"
+      # We don't do coverage since it makes PyPy far too slow:
+      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
   c-locale:

@@ -68,6 +68,10 @@ export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
 export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
 export PIP_NO_INDEX="1"
+# Make output unbuffered, so progress reports from subunitv2-file get streamed
+# and notify CircleCI we're still alive.
+export PYTHONUNBUFFERED=1
 if [ "${ALLOWED_FAILURE}" = "yes" ]; then
     alternative="true"
 else

.gitignore
@@ -9,6 +9,7 @@ venv*
 *~
 *.DS_Store
 .*.kate-swp
+*.bak
 /build/
 /support/

newsfragments/3374.minor (new empty file)
newsfragments/3392.minor (new empty file)
newsfragments/3393.minor (new empty file)
newsfragments/3396.minor (new empty file)
newsfragments/3397.minor (new empty file)
newsfragments/3401.minor (new empty file)
newsfragments/3403.minor (new empty file)

@@ -116,6 +116,11 @@ install_requires = [
     # know works on Python 2.7.
     "eliot ~= 1.7",
+    # Pyrsistent 0.17.0 (which we use by way of Eliot) has dropped
+    # Python 2 entirely; stick to the version known to work for us.
+    # XXX: drop this bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3404
+    "pyrsistent < 0.17.0",
+
     # A great way to define types of values.
     # XXX: drop the upper bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3390
     "attrs >= 18.2.0, < 20",

@@ -2,7 +2,10 @@ import os, stat, time, weakref
 from base64 import urlsafe_b64encode
 from functools import partial
 from errno import ENOENT, EPERM
-from ConfigParser import NoSectionError
+try:
+    from ConfigParser import NoSectionError
+except ImportError:
+    from configparser import NoSectionError
 from foolscap.furl import (
     decode_furl,

@@ -1,4 +1,16 @@
-# -*- test-case-name: allmydata.test.test_encode_share -*-
+"""
+CRS encoding and decoding.
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 from zope.interface import implementer
 from twisted.internet import defer
@@ -9,7 +21,7 @@ import zfec
 @implementer(ICodecEncoder)
 class CRSEncoder(object):
-    ENCODER_TYPE = "crs"
+    ENCODER_TYPE = b"crs"

     def set_params(self, data_size, required_shares, max_shares):
         assert required_shares <= max_shares
@@ -27,7 +39,7 @@ class CRSEncoder(object):
         return (self.data_size, self.required_shares, self.max_shares)

     def get_serialized_params(self):
-        return "%d-%d-%d" % (self.data_size, self.required_shares,
+        return b"%d-%d-%d" % (self.data_size, self.required_shares,
                              self.max_shares)

     def get_block_size(self):
@@ -37,7 +49,7 @@ class CRSEncoder(object):
         precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares)

         if desired_share_ids is None:
-            desired_share_ids = range(self.max_shares)
+            desired_share_ids = list(range(self.max_shares))

         for inshare in inshares:
             assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares)
@@ -71,5 +83,5 @@ class CRSDecoder(object):
         return defer.succeed(data)

 def parse_params(serializedparams):
-    pieces = serializedparams.split("-")
+    pieces = serializedparams.split(b"-")
     return int(pieces[0]), int(pieces[1]), int(pieces[2])
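The serialized parameter string above now round-trips as bytes end to end: get_serialized_params() emits b"%d-%d-%d" and parse_params() splits on b"-". A minimal standalone sketch of that invariant (restating the two one-liners from the diff rather than importing the module):

    def parse_params(serializedparams):
        pieces = serializedparams.split(b"-")
        return int(pieces[0]), int(pieces[1]), int(pieces[2])

    # int() accepts ASCII bytes on Python 3, so the round trip holds:
    serialized = b"%d-%d-%d" % (1024, 3, 10)   # (data_size, required_shares, max_shares)
    assert parse_params(serialized) == (1024, 3, 10)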

@@ -1,4 +1,6 @@
 """Directory Node implementation."""
+from past.builtins import unicode
+
 import time

 from zope.interface import implementer
@@ -227,7 +229,7 @@ def pack_children(childrenx, writekey, deep_immutable=False):
     return _pack_normalized_children(children, writekey=writekey, deep_immutable=deep_immutable)

-ZERO_LEN_NETSTR=netstring('')
+ZERO_LEN_NETSTR=netstring(b'')
 def _pack_normalized_children(children, writekey, deep_immutable=False):
     """Take a dict that maps:
          children[unicode_nfc_name] = (IFileSystemNode, metadata_dict)
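For context on ZERO_LEN_NETSTR: a netstring frames a byte string as <decimal length>:<payload>,. A small sketch of the framing (a standalone restatement of what the netstring() helper used by this module produces for bytes input):

    def netstring(s):
        # Netstring framing: b"<len>:<payload>,"
        assert isinstance(s, bytes)
        return b"%d:%s," % (len(s), s)

    assert netstring(b"") == b"0:,"        # the ZERO_LEN_NETSTR case
    assert netstring(b"abc") == b"3:abc,"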

@@ -4,7 +4,7 @@ from foolscap.api import eventually
 from allmydata.interfaces import NotEnoughSharesError, NoSharesError
 from allmydata.util import log
 from allmydata.util.dictutil import DictOfSets
-from common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
+from .common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
      BadSegmentNumberError

 class SegmentFetcher(object):

@@ -5,7 +5,7 @@ from foolscap.api import eventually
 from allmydata.util import base32, log
 from twisted.internet import reactor

-from share import Share, CommonShare
+from .share import Share, CommonShare

 def incidentally(res, f, *args, **kwargs):
     """Add me to a Deferred chain like this:

@@ -13,10 +13,10 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
      NotEnoughHashesError

 # local imports
-from finder import ShareFinder
-from fetcher import SegmentFetcher
-from segmentation import Segmentation
-from common import BadCiphertextHashError
+from .finder import ShareFinder
+from .fetcher import SegmentFetcher
+from .segmentation import Segmentation
+from .common import BadCiphertextHashError

 class IDownloadStatusHandlingConsumer(Interface):
     def set_download_status_read_event(read_ev):

@@ -9,7 +9,7 @@ from allmydata.util import log
 from allmydata.util.spans import overlap
 from allmydata.interfaces import DownloadStopped

-from common import BadSegmentNumberError, WrongSegmentError
+from .common import BadSegmentNumberError, WrongSegmentError

 @implementer(IPushProducer)
 class Segmentation(object):

@@ -13,7 +13,7 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
 from allmydata.immutable.layout import make_write_bucket_proxy
 from allmydata.util.observer import EventStreamObserver

-from common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
+from .common import COMPLETE, CORRUPT, DEAD, BADSEGNUM

 class LayoutInvalid(Exception):

@@ -171,7 +171,7 @@ class WriteBucketProxy(object):
     def put_block(self, segmentnum, data):
         offset = self._offsets['data'] + segmentnum * self._block_size
         assert offset + len(data) <= self._offsets['uri_extension']
-        assert isinstance(data, str)
+        assert isinstance(data, bytes)
         if segmentnum < self._num_segments-1:
             precondition(len(data) == self._block_size,
                          len(data), self._block_size)
@@ -185,7 +185,7 @@ class WriteBucketProxy(object):
     def put_crypttext_hashes(self, hashes):
         offset = self._offsets['crypttext_hash_tree']
         assert isinstance(hashes, list)
-        data = "".join(hashes)
+        data = b"".join(hashes)
         precondition(len(data) == self._segment_hash_size,
                      len(data), self._segment_hash_size)
         precondition(offset + len(data) <= self._offsets['block_hashes'],
@@ -196,7 +196,7 @@ class WriteBucketProxy(object):
     def put_block_hashes(self, blockhashes):
         offset = self._offsets['block_hashes']
         assert isinstance(blockhashes, list)
-        data = "".join(blockhashes)
+        data = b"".join(blockhashes)
         precondition(len(data) == self._segment_hash_size,
                      len(data), self._segment_hash_size)
         precondition(offset + len(data) <= self._offsets['share_hashes'],
@@ -209,7 +209,7 @@ class WriteBucketProxy(object):
         # as 2+32=34 bytes each
         offset = self._offsets['share_hashes']
         assert isinstance(sharehashes, list)
-        data = "".join([struct.pack(">H", hashnum) + hashvalue
+        data = b"".join([struct.pack(">H", hashnum) + hashvalue
                         for hashnum,hashvalue in sharehashes])
         precondition(len(data) == self._share_hashtree_size,
                      len(data), self._share_hashtree_size)
@@ -220,7 +220,7 @@ class WriteBucketProxy(object):
     def put_uri_extension(self, data):
         offset = self._offsets['uri_extension']
-        assert isinstance(data, str)
+        assert isinstance(data, bytes)
         precondition(len(data) <= self._uri_extension_size_max,
                      len(data), self._uri_extension_size_max)
         length = struct.pack(self.fieldstruct, len(data))

@@ -1,3 +1,5 @@
+from past.builtins import long
+
 import os, time, weakref, itertools
 from zope.interface import implementer
 from twisted.python import failure
@@ -26,7 +28,7 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
 from allmydata.immutable import layout
 from six.moves import cStringIO as StringIO

-from happiness_upload import share_placement, calculate_happiness
+from .happiness_upload import share_placement, calculate_happiness

 from ..util.eliotutil import (
     log_call_deferred,

@@ -1,3 +1,19 @@
+"""
+Interfaces for Tahoe-LAFS.
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Don't import object/str/dict/etc. types, so we don't break any
+    # interfaces. Not importing open() because it triggers bogus flake8 error.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, range, max, min  # noqa: F401
+
 from past.builtins import long

 from zope.interface import Interface, Attribute
@@ -58,7 +74,7 @@ class RIBucketReader(RemoteInterface):
     def read(offset=Offset, length=ReadSize):
         return ShareData

-    def advise_corrupt_share(reason=str):
+    def advise_corrupt_share(reason=bytes):
         """Clients who discover hash failures in shares that they have
         downloaded from me will use this method to inform me about the
         failures. I will record their concern so that my operator can
@@ -71,7 +87,7 @@ class RIBucketReader(RemoteInterface):
         """

-TestVector = ListOf(TupleOf(Offset, ReadSize, str, str))
+TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes))
 # elements are (offset, length, operator, specimen)
 # operator is one of "lt, le, eq, ne, ge, gt"
 # nop always passes and is used to fetch data while writing.
@@ -89,13 +105,13 @@ ReadData = ListOf(ShareData)

 class RIStorageServer(RemoteInterface):
-    __remote_name__ = "RIStorageServer.tahoe.allmydata.com"
+    __remote_name__ = b"RIStorageServer.tahoe.allmydata.com"

     def get_version():
         """
         Return a dictionary of version information.
         """
-        return DictOf(str, Any())
+        return DictOf(bytes, Any())

     def allocate_buckets(storage_index=StorageIndex,
                          renew_secret=LeaseRenewSecret,
@@ -277,8 +293,8 @@ class RIStorageServer(RemoteInterface):
         """
         return TupleOf(bool, DictOf(int, ReadData))

-    def advise_corrupt_share(share_type=str, storage_index=StorageIndex,
-                             shnum=int, reason=str):
+    def advise_corrupt_share(share_type=bytes, storage_index=StorageIndex,
+                             shnum=int, reason=bytes):
         """Clients who discover hash failures in shares that they have
         downloaded from me will use this method to inform me about the
         failures. I will record their concern so that my operator can
@@ -2859,7 +2875,7 @@ UploadResults = Any() #DictOf(str, str)

 class RIEncryptedUploadable(RemoteInterface):
-    __remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com"
+    __remote_name__ = b"RIEncryptedUploadable.tahoe.allmydata.com"

     def get_size():
         return Offset
@@ -2875,7 +2891,7 @@ class RIEncryptedUploadable(RemoteInterface):

 class RICHKUploadHelper(RemoteInterface):
-    __remote_name__ = "RIUploadHelper.tahoe.allmydata.com"
+    __remote_name__ = b"RIUploadHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2888,7 +2904,7 @@ class RICHKUploadHelper(RemoteInterface):

 class RIHelper(RemoteInterface):
-    __remote_name__ = "RIHelper.tahoe.allmydata.com"
+    __remote_name__ = b"RIHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2915,7 +2931,7 @@ class RIHelper(RemoteInterface):

 class RIStatsProvider(RemoteInterface):
-    __remote_name__ = "RIStatsProvider.tahoe.allmydata.com"
+    __remote_name__ = b"RIStatsProvider.tahoe.allmydata.com"
     """
     Provides access to statistics and monitoring information.
     """
@@ -2932,7 +2948,7 @@ class RIStatsProvider(RemoteInterface):

 class RIStatsGatherer(RemoteInterface):
-    __remote_name__ = "RIStatsGatherer.tahoe.allmydata.com"
+    __remote_name__ = b"RIStatsGatherer.tahoe.allmydata.com"
     """
     Provides a monitoring service for centralised collection of stats
     """

@@ -1,7 +1,21 @@
+"""
+Manage status of long-running operations.
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 from zope.interface import Interface, implementer
 from allmydata.util import observer

 class IMonitor(Interface):
     """I manage status, progress, and cancellation for long-running operations.

@@ -1,3 +1,4 @@
+from past.utils import old_div
 import struct

 from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError, \
@@ -180,11 +181,11 @@ def pack_offsets(verification_key_length, signature_length,
 def pack_share(prefix, verification_key, signature,
                share_hash_chain, block_hash_tree,
                share_data, encprivkey):
-    share_hash_chain_s = "".join([struct.pack(">H32s", i, share_hash_chain[i])
-                                  for i in sorted(share_hash_chain.keys())])
+    share_hash_chain_s = b"".join([struct.pack(">H32s", i, share_hash_chain[i])
+                                   for i in sorted(share_hash_chain.keys())])
     for h in block_hash_tree:
         assert len(h) == 32
-    block_hash_tree_s = "".join(block_hash_tree)
+    block_hash_tree_s = b"".join(block_hash_tree)

     offsets = pack_offsets(len(verification_key),
                            len(signature),
@@ -192,7 +193,7 @@ def pack_share(prefix, verification_key, signature,
                            len(block_hash_tree_s),
                            len(share_data),
                            len(encprivkey))
-    final_share = "".join([prefix,
+    final_share = b"".join([prefix,
                            offsets,
                            verification_key,
                            signature,
@@ -255,7 +256,7 @@ class SDMFSlotWriteProxy(object):
                                                   self._required_shares)
         assert expected_segment_size == segment_size

-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = old_div(self._segment_size, self._required_shares)

         # This is meant to mimic how SDMF files were built before MDMF
         # entered the picture: we generate each share in its entirety,
@@ -296,7 +297,7 @@ class SDMFSlotWriteProxy(object):
                                          salt)
         else:
             checkstring = checkstring_or_seqnum
-        self._testvs = [(0, len(checkstring), "eq", checkstring)]
+        self._testvs = [(0, len(checkstring), b"eq", checkstring)]

     def get_checkstring(self):
@@ -306,7 +307,7 @@ class SDMFSlotWriteProxy(object):
         """
         if self._testvs:
             return self._testvs[0][3]
-        return ""
+        return b""

     def put_block(self, data, segnum, salt):
@@ -343,7 +344,7 @@ class SDMFSlotWriteProxy(object):
             assert len(h) == HASH_SIZE

         # serialize the blockhashes, then set them.
-        blockhashes_s = "".join(blockhashes)
+        blockhashes_s = b"".join(blockhashes)
         self._share_pieces['block_hash_tree'] = blockhashes_s

         return defer.succeed(None)
@@ -354,11 +355,11 @@ class SDMFSlotWriteProxy(object):
         Add the share hash chain to the share.
         """
         assert isinstance(sharehashes, dict)
-        for h in sharehashes.itervalues():
+        for h in sharehashes.values():
             assert len(h) == HASH_SIZE

         # serialize the sharehashes, then set them.
-        sharehashes_s = "".join([struct.pack(">H32s", i, sharehashes[i])
-                                 for i in sorted(sharehashes.keys())])
+        sharehashes_s = b"".join([struct.pack(">H32s", i, sharehashes[i])
+                                  for i in sorted(sharehashes.keys())])
         self._share_pieces['share_hash_chain'] = sharehashes_s
@@ -383,7 +384,7 @@ class SDMFSlotWriteProxy(object):
         assert len(salt) == SALT_SIZE

         self._share_pieces['salt'] = salt
-        self._share_pieces['sharedata'] = ""
+        self._share_pieces['sharedata'] = b""

     def get_signable(self):
@@ -519,7 +520,7 @@ class SDMFSlotWriteProxy(object):
         # to the remote server in one write.
         offsets = self._pack_offsets()
         prefix = self.get_signable()
-        final_share = "".join([prefix,
+        final_share = b"".join([prefix,
                                offsets,
                                self._share_pieces['verification_key'],
                                self._share_pieces['signature'],
@@ -537,7 +538,7 @@ class SDMFSlotWriteProxy(object):
             # yet, so we assume that we are writing a new share, and set
             # a test vector that will allow a new share to be written.
             self._testvs = []
-            self._testvs.append(tuple([0, 1, "eq", ""]))
+            self._testvs.append(tuple([0, 1, b"eq", b""]))

         tw_vectors = {}
         tw_vectors[self.shnum] = (self._testvs, datavs, None)
@@ -788,7 +789,7 @@ class MDMFSlotWriteProxy(object):
         # and also because it provides a useful amount of bounds checking.
         self._num_segments = mathutil.div_ceil(self._data_length,
                                                self._segment_size)
-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = old_div(self._segment_size, self._required_shares)

         # We also calculate the share size, to help us with block
         # constraints later.
         tail_size = self._data_length % self._segment_size
@@ -797,7 +798,7 @@ class MDMFSlotWriteProxy(object):
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size /= self._required_shares
+            self._tail_block_size = old_div(self._tail_block_size, self._required_shares)

         # We already know where the sharedata starts; right after the end
         # of the header (which is defined as the signable part + the offsets)
@@ -868,7 +869,7 @@ class MDMFSlotWriteProxy(object):
         else:
             checkstring = seqnum_or_checkstring

-        if checkstring == "":
+        if checkstring == b"":
             # We special-case this, since len("") = 0, but we need
             # length of 1 for the case of an empty share to work on the
             # storage server, which is what a checkstring that is the
@@ -876,7 +877,7 @@ class MDMFSlotWriteProxy(object):
             self._testvs = []
         else:
             self._testvs = []
-            self._testvs.append((0, len(checkstring), "eq", checkstring))
+            self._testvs.append((0, len(checkstring), b"eq", checkstring))

     def __repr__(self):
@@ -893,7 +894,7 @@ class MDMFSlotWriteProxy(object):
         if self._root_hash:
             roothash = self._root_hash
         else:
-            roothash = "\x00" * 32
+            roothash = b"\x00" * 32
         return struct.pack(MDMFCHECKSTRING,
                            1,
                            self._seqnum,
@@ -964,7 +965,7 @@ class MDMFSlotWriteProxy(object):
         assert isinstance(blockhashes, list)

-        blockhashes_s = "".join(blockhashes)
+        blockhashes_s = b"".join(blockhashes)
         self._offsets['EOF'] = self._offsets['block_hash_tree'] + len(blockhashes_s)

         self._writevs.append(tuple([self._offsets['block_hash_tree'],
@@ -998,7 +999,7 @@ class MDMFSlotWriteProxy(object):
         if "verification_key" in self._offsets:
             raise LayoutInvalid("You must write the share hash chain "
                                 "before you write the signature")
-        sharehashes_s = "".join([struct.pack(">H32s", i, sharehashes[i])
-                                 for i in sorted(sharehashes.keys())])
+        sharehashes_s = b"".join([struct.pack(">H32s", i, sharehashes[i])
+                                  for i in sorted(sharehashes.keys())])
         self._offsets['signature'] = self._offsets['share_hash_chain'] + \
             len(sharehashes_s)
@@ -1149,7 +1150,7 @@ class MDMFSlotWriteProxy(object):
         tw_vectors = {}
         if not self._testvs:
             self._testvs = []
-            self._testvs.append(tuple([0, 1, "eq", ""]))
+            self._testvs.append(tuple([0, 1, b"eq", b""]))
         if not self._written:
             # Write a new checkstring to the share when we write it, so
             # that we have something to check later.
@@ -1157,7 +1158,7 @@ class MDMFSlotWriteProxy(object):
             datavs.append((0, new_checkstring))
             def _first_write():
                 self._written = True
-                self._testvs = [(0, len(new_checkstring), "eq", new_checkstring)]
+                self._testvs = [(0, len(new_checkstring), b"eq", new_checkstring)]
             on_success = _first_write
         tw_vectors[self.shnum] = (self._testvs, datavs, None)
         d = self._storage_server.slot_testv_and_readv_and_writev(
@@ -1194,7 +1195,7 @@ class MDMFSlotReadProxy(object):
                  storage_server,
                  storage_index,
                  shnum,
-                 data="",
+                 data=b"",
                  data_is_everything=False):

         # Start the initialization process.
         self._storage_server = storage_server
@@ -1238,7 +1239,7 @@ class MDMFSlotReadProxy(object):
         # None if there isn't any cached data, but the way we index the
         # cached data requires a string, so convert None to "".
         if self._data == None:
-            self._data = ""
+            self._data = b""

     def _maybe_fetch_offsets_and_header(self, force_remote=False):
@@ -1317,7 +1318,7 @@ class MDMFSlotReadProxy(object):
         self._segment_size = segsize
         self._data_length = datalen

-        self._block_size = self._segment_size / self._required_shares
+        self._block_size = old_div(self._segment_size, self._required_shares)
         # We can upload empty files, and need to account for this fact
         # so as to avoid zero-division and zero-modulo errors.
         if datalen > 0:
@@ -1329,7 +1330,7 @@ class MDMFSlotReadProxy(object):
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size /= self._required_shares
+            self._tail_block_size = old_div(self._tail_block_size, self._required_shares)

         return encoding_parameters
@@ -1416,7 +1417,7 @@ class MDMFSlotReadProxy(object):
         # when we fetched the header
         data = results[self.shnum]
         if not data:
-            data = ""
+            data = b""
         else:
             if len(data) != 1:
                 raise BadShareError("got %d vectors, not 1" % len(data))
@@ -1425,7 +1426,7 @@ class MDMFSlotReadProxy(object):
         else:
             data = results[self.shnum]
             if not data:
-                salt = data = ""
+                salt = data = b""
             else:
                 salt_and_data = results[self.shnum][0]
                 salt = salt_and_data[:SALT_SIZE]
@@ -1743,7 +1744,7 @@ class MDMFSlotReadProxy(object):
     def _read(self, readvs, force_remote=False):
-        unsatisfiable = filter(lambda x: x[0] + x[1] > len(self._data), readvs)
+        unsatisfiable = list(filter(lambda x: x[0] + x[1] > len(self._data), readvs))
         # TODO: It's entirely possible to tweak this so that it just
         # fulfills the requests that it can, and not demand that all
         # requests are satisfiable before running it.
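Every old_div() substitution above pins down Python 2 division semantics for these integer block-size computations, where / on two ints floored. A quick illustration of the preserved behaviour, using the past.utils helper this file now imports:

    from past.utils import old_div

    # ints floor-divide, as Python 2's "/" did; floats are unaffected.
    assert old_div(7, 2) == 3
    assert old_div(7.0, 2) == 3.5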

@@ -2,6 +2,8 @@
 This module contains classes and functions to implement and manage
 a node for Tahoe-LAFS.
 """
+from past.builtins import unicode
+
 import datetime
 import os.path
 import re
@@ -272,7 +274,10 @@ class _Config(object):
         self.config = configparser

         nickname_utf8 = self.get_config("node", "nickname", "<unspecified>")
-        self.nickname = nickname_utf8.decode("utf-8")
+        if isinstance(nickname_utf8, bytes):  # Python 2
+            self.nickname = nickname_utf8.decode("utf-8")
+        else:
+            self.nickname = nickname_utf8
         assert type(self.nickname) is unicode

     def validate(self, valid_config_sections):

@@ -1,3 +1,4 @@
+from future.utils import bytes_to_native_str
 import os, stat, struct, time

 from foolscap.api import Referenceable
@@ -85,7 +87,7 @@ class ShareFile(object):
         seekpos = self._data_offset+offset
         actuallength = max(0, min(length, self._lease_offset-seekpos))
         if actuallength == 0:
-            return ""
+            return b""
         with open(self.home, 'rb') as f:
             f.seek(seekpos)
             return f.read(actuallength)
@@ -298,7 +300,9 @@ class BucketReader(Referenceable):
     def __repr__(self):
         return "<%s %s %s>" % (self.__class__.__name__,
-                               base32.b2a(self.storage_index[:8])[:12],
+                               bytes_to_native_str(
+                                   base32.b2a(self.storage_index[:8])[:12]
+                               ),
                                self.shnum)

     def remote_read(self, offset, length):
@@ -309,7 +313,7 @@ class BucketReader(Referenceable):
         return data

     def remote_advise_corrupt_share(self, reason):
-        return self.ss.remote_advise_corrupt_share("immutable",
+        return self.ss.remote_advise_corrupt_share(b"immutable",
                                                    self.storage_index,
                                                    self.shnum,
                                                    reason)
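bytes_to_native_str() (from the future library, imported at the top of this file) returns the interpreter's native str type, which keeps __repr__ output readable on both Pythons. A quick sketch of the observable behaviour:

    from future.utils import bytes_to_native_str

    # Python 3 decodes the bytes to text; on Python 2 the native str type
    # already is bytes, so the value passes through unchanged.
    s = bytes_to_native_str(b"abcdef123456")
    assert isinstance(s, str) and s == "abcdef123456"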

@@ -113,7 +113,7 @@ class MutableShareFile(object):
         # start beyond the end of the data return an empty string.
         length = max(0, data_length-offset)
         if length == 0:
-            return ""
+            return b""
         precondition(offset+length <= data_length)
         f.seek(self.DATA_OFFSET+offset)
         data = f.read(length)
@@ -421,18 +421,18 @@ class MutableShareFile(object):
         # self._change_container_size() here.

 def testv_compare(a, op, b):
-    assert op in ("lt", "le", "eq", "ne", "ge", "gt")
-    if op == "lt":
+    assert op in (b"lt", b"le", b"eq", b"ne", b"ge", b"gt")
+    if op == b"lt":
         return a < b
-    if op == "le":
+    if op == b"le":
         return a <= b
-    if op == "eq":
+    if op == b"eq":
         return a == b
-    if op == "ne":
+    if op == b"ne":
         return a != b
-    if op == "ge":
+    if op == b"ge":
         return a >= b
-    if op == "gt":
+    if op == b"gt":
         return a > b
     # never reached
@@ -441,7 +441,7 @@ class EmptyShare(object):
     def check_testv(self, testv):
         test_good = True
         for (offset, length, operator, specimen) in testv:
-            data = ""
+            data = b""
             if not testv_compare(data, operator, specimen):
                 test_good = False
                 break
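These byte-string operators evaluate the test vectors declared in interfaces.py as TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes)). A compact restatement of how one vector is checked against an empty share (table-driven rather than the if-chain above, but equivalent for bytes operands):

    def testv_compare(a, op, b):
        assert op in (b"lt", b"le", b"eq", b"ne", b"ge", b"gt")
        return {b"lt": a < b, b"le": a <= b, b"eq": a == b,
                b"ne": a != b, b"ge": a >= b, b"gt": a > b}[op]

    # The "share must not exist yet" vector used by the mutable write proxies:
    offset, length, operator, specimen = (0, 1, b"eq", b"")
    assert testv_compare(b"", operator, specimen)   # an empty share passes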

@@ -1,3 +1,4 @@
+from future.utils import bytes_to_native_str
 import os, re, struct, time
 import weakref
 import six
@@ -51,6 +52,7 @@ class StorageServer(service.MultiService, Referenceable):
         service.MultiService.__init__(self)
         assert isinstance(nodeid, bytes)
         assert len(nodeid) == 20
+        assert isinstance(nodeid, bytes)
         self.my_nodeid = nodeid
         self.storedir = storedir
         sharedir = os.path.join(storedir, "shares")
@@ -398,7 +400,7 @@ class StorageServer(service.MultiService, Referenceable):
         # since all shares get the same lease data, we just grab the leases
         # from the first share
         try:
-            shnum, filename = self._get_bucket_shares(storage_index).next()
+            shnum, filename = next(self._get_bucket_shares(storage_index))
             sf = ShareFile(filename)
             return sf.get_leases()
         except StopIteration:
@@ -676,6 +678,10 @@ class StorageServer(service.MultiService, Referenceable):
     def remote_advise_corrupt_share(self, share_type, storage_index, shnum,
                                     reason):
+        # This is a remote API, I believe, so this has to be bytes for legacy
+        # protocol backwards compatibility reasons.
+        assert isinstance(share_type, bytes)
+        assert isinstance(reason, bytes)
         fileutil.make_dirs(self.corruption_advisory_dir)
         now = time_format.iso_utc(sep="T")
         si_s = si_b2a(storage_index)
@@ -684,11 +690,11 @@ class StorageServer(service.MultiService, Referenceable):
                           "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
         with open(fn, "w") as f:
             f.write("report: Share Corruption\n")
-            f.write("type: %s\n" % share_type)
-            f.write("storage_index: %s\n" % si_s)
+            f.write("type: %s\n" % bytes_to_native_str(share_type))
+            f.write("storage_index: %s\n" % bytes_to_native_str(si_s))
             f.write("share_number: %d\n" % shnum)
             f.write("\n")
-            f.write(reason)
+            f.write(bytes_to_native_str(reason))
             f.write("\n")
         log.msg(format=("client claims corruption in (%(share_type)s) " +
                         "%(si)s-%(shnum)d: %(reason)s"),

@@ -30,9 +30,12 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .

 import re, time, hashlib

-from ConfigParser import (
-    NoSectionError,
-)
+try:
+    from ConfigParser import (
+        NoSectionError,
+    )
+except ImportError:
+    from configparser import NoSectionError

 import attr
 from zope.interface import (
     Attribute,
@@ -534,11 +537,11 @@ class _NullStorage(object):
     which we can't communicate.
     """
     nickname = ""
-    permutation_seed = hashlib.sha256("").digest()
-    tubid = hashlib.sha256("").digest()
+    permutation_seed = hashlib.sha256(b"").digest()
+    tubid = hashlib.sha256(b"").digest()
     storage_server = None
-    lease_seed = hashlib.sha256("").digest()
+    lease_seed = hashlib.sha256(b"").digest()

     name = "<unsupported>"
     longname = "<storage with unsupported protocol>"
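The hashlib changes above are forced by Python 3, where sha256() accepts only bytes and raises TypeError for text. A quick check of the new spelling (the hex constant is the standard SHA-256 digest of the empty string):

    import hashlib

    assert hashlib.sha256(b"").hexdigest() == (
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")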

@@ -52,7 +52,6 @@ from twisted.internet.defer import inlineCallbacks, returnValue
 from twisted.internet.interfaces import IPullProducer
 from twisted.python import failure
 from twisted.python.filepath import FilePath
-from twisted.application import service
 from twisted.web.error import Error as WebError
 from twisted.internet.interfaces import (
     IStreamServerEndpointStringParser,
@@ -88,6 +87,8 @@ from ..crypto import (
 from .eliotutil import (
     EliotLoggedRunTest,
 )
+# Backwards compatibility imports:
+from .common_py3 import LoggingServiceParent, ShouldFailMixin  # noqa: F401

 TEST_RSA_KEY_SIZE = 522
@@ -780,53 +781,8 @@ def create_mutable_filenode(contents, mdmf=False, all_contents=None):
     return filenode

-class LoggingServiceParent(service.MultiService):
-    def log(self, *args, **kwargs):
-        return log.msg(*args, **kwargs)
-
 TEST_DATA="\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)

-class ShouldFailMixin(object):
-    def shouldFail(self, expected_failure, which, substring,
-                   callable, *args, **kwargs):
-        """Assert that a function call raises some exception. This is a
-        Deferred-friendly version of TestCase.assertRaises() .
-
-        Suppose you want to verify the following function:
-
-         def broken(a, b, c):
-             if a < 0:
-                 raise TypeError('a must not be negative')
-             return defer.succeed(b+c)
-
-        You can use:
-            d = self.shouldFail(TypeError, 'test name',
-                                'a must not be negative',
-                                broken, -4, 5, c=12)
-        in your test method. The 'test name' string will be included in the
-        error message, if any, because Deferred chains frequently make it
-        difficult to tell which assertion was tripped.
-
-        The substring= argument, if not None, must appear in the 'repr'
-        of the message wrapped by this Failure, or the test will fail.
-        """
-
-        assert substring is None or isinstance(substring, str)
-        d = defer.maybeDeferred(callable, *args, **kwargs)
-        def done(res):
-            if isinstance(res, failure.Failure):
-                res.trap(expected_failure)
-                if substring:
-                    message = repr(res.value.args[0])
-                    self.failUnless(substring in message,
-                                    "%s: substring '%s' not in '%s'"
-                                    % (which, substring, message))
-            else:
-                self.fail("%s was supposed to raise %s, not get '%s'" %
-                          (which, expected_failure, res))
-        d.addBoth(done)
-        return d
-
 class WebErrorMixin(object):
     def explain_web_error(self, f):

@@ -19,11 +19,13 @@ import time
 import signal

 from twisted.internet import defer, reactor
+from twisted.application import service
 from twisted.python import failure
 from twisted.trial import unittest

 from ..util.assertutil import precondition
 from ..util.encodingutil import unicode_platform, get_filesystem_encoding
+from ..util import log

 class TimezoneMixin(object):
@@ -77,6 +79,28 @@ class ShouldFailMixin(object):
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
+        """Assert that a function call raises some exception. This is a
+        Deferred-friendly version of TestCase.assertRaises() .
+
+        Suppose you want to verify the following function:
+
+         def broken(a, b, c):
+             if a < 0:
+                 raise TypeError('a must not be negative')
+             return defer.succeed(b+c)
+
+        You can use:
+            d = self.shouldFail(TypeError, 'test name',
+                                'a must not be negative',
+                                broken, -4, 5, c=12)
+        in your test method. The 'test name' string will be included in the
+        error message, if any, because Deferred chains frequently make it
+        difficult to tell which assertion was tripped.
+
+        The substring= argument, if not None, must appear in the 'repr'
+        of the message wrapped by this Failure, or the test will fail.
+        """
+
         assert substring is None or isinstance(substring, (bytes, unicode))
         d = defer.maybeDeferred(callable, *args, **kwargs)
         def done(res):
@@ -135,3 +159,9 @@ class FakeCanary(object):
         if self.ignore:
             return
         del self.disconnectors[marker]
+
+
+class LoggingServiceParent(service.MultiService):
+    def log(self, *args, **kwargs):
+        return log.msg(*args, **kwargs)

@@ -1,64 +1,7 @@
-import re
-
 import treq
 from twisted.internet import defer
 from twisted.web.error import Error
-from nevow.testutil import FakeRequest
-from nevow import inevow, context
-
-class WebRenderingMixin(object):
-    # d=page.renderString() or s=page.renderSynchronously() will exercise
-    # docFactory, render_*/data_* . It won't exercise want_json(), or my
-    # renderHTTP() override which tests want_json(). To exercise args=, we
-    # must build a context. Pages which use a return_to= argument need a
-    # context.
-
-    # d=page.renderHTTP(ctx) will exercise my renderHTTP, want_json, and
-    # docFactory/render_*/data_*, but it requires building a context. Since
-    # we're already building a context, it is easy to exercise args= .
-
-    # so, use at least two d=page.renderHTTP(ctx) per page (one for json, one
-    # for html), then use lots of simple s=page.renderSynchronously() to
-    # exercise the fine details (the ones that don't require args=).
-
-    def make_context(self, req):
-        ctx = context.RequestContext(tag=req)
-        ctx.remember(req, inevow.IRequest)
-        ctx.remember(None, inevow.IData)
-        ctx = context.WovenContext(parent=ctx, precompile=False)
-        return ctx
-
-    def render1(self, page, **kwargs):
-        # use this to exercise an overridden renderHTTP, usually for
-        # output=json or render_GET. It always returns a Deferred.
-        req = FakeRequest(**kwargs)
-        req.fields = None
-        ctx = self.make_context(req)
-        d = defer.maybeDeferred(page.renderHTTP, ctx)
-        def _done(res):
-            if isinstance(res, str):
-                return res + req.v
-            return req.v
-        d.addCallback(_done)
-        return d
-
-    def render2(self, page, **kwargs):
-        # use this to exercise the normal Nevow docFactory rendering. It
-        # returns a string. If one of the render_* methods returns a
-        # Deferred, this will throw an exception. (note that
-        # page.renderString is the Deferred-returning equivalent)
-        req = FakeRequest(**kwargs)
-        req.fields = None
-        ctx = self.make_context(req)
-        return page.renderSynchronously(ctx)
-
-    def failUnlessIn(self, substring, s):
-        self.failUnless(substring in s, s)
-
-    def remove_tags(self, s):
-        s = re.sub(r'<[^>]*>', ' ', s)
-        s = re.sub(r'\s+', ' ', s)
-        return s

 @defer.inlineCallbacks
 def do_http(method, url, **kwargs):

@@ -2,6 +2,8 @@
 Tools aimed at the interaction between tests and Eliot.
 """

+from past.builtins import unicode
+
 __all__ = [
     "RUN_TEST",
     "EliotLoggedRunTest",

@@ -1,8 +1,21 @@
+"""
+Tests for allmydata.codec.
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import os
 from twisted.trial import unittest
 from twisted.python import log
-from allmydata.codec import CRSEncoder, CRSDecoder
+from allmydata.codec import CRSEncoder, CRSDecoder, parse_params
 import random
 from allmydata.util import mathutil
@@ -13,6 +26,8 @@ class T(unittest.TestCase):
         enc.set_params(size, required_shares, max_shares)
         params = enc.get_params()
         assert params == (size, required_shares, max_shares)
+        serialized_params = enc.get_serialized_params()
+        self.assertEqual(parse_params(serialized_params), params)
         log.msg("params: %s" % (params,))
         d = enc.encode(data0s)
         def _done_encoding_all(shares_and_shareids):
@@ -23,7 +38,7 @@ class T(unittest.TestCase):
         d.addCallback(_done_encoding_all)
         if fewer_shares is not None:
             # also validate that the desired_shareids= parameter works
-            desired_shareids = random.sample(range(max_shares), fewer_shares)
+            desired_shareids = random.sample(list(range(max_shares)), fewer_shares)
             d.addCallback(lambda res: enc.encode(data0s, desired_shareids))
             def _check_fewer_shares(some_shares_and_their_shareids):
                 (some_shares, their_shareids) = some_shares_and_their_shareids
@@ -38,11 +53,11 @@ class T(unittest.TestCase):
             return d1

         def _check_data(decoded_shares):
-            self.failUnlessEqual(len(''.join(decoded_shares)), len(''.join(data0s)))
+            self.failUnlessEqual(len(b''.join(decoded_shares)), len(b''.join(data0s)))
             self.failUnlessEqual(len(decoded_shares), len(data0s))
             for (i, (x, y)) in enumerate(zip(data0s, decoded_shares)):
                 self.failUnlessEqual(x, y, "%s: %r != %r.... first share was %r" % (str(i), x, y, data0s[0],))
-            self.failUnless(''.join(decoded_shares) == ''.join(data0s), "%s" % ("???",))
+            self.failUnless(b''.join(decoded_shares) == b''.join(data0s), "%s" % ("???",))
             # 0data0sclipped = tuple(data0s)
             # data0sclipped[-1] =
             # self.failUnless(tuple(decoded_shares) == tuple(data0s))
@@ -59,7 +74,7 @@ class T(unittest.TestCase):
         def _decode_some_random(res):
             log.msg("_decode_some_random")
             # use a randomly-selected minimal subset
-            l = random.sample(zip(self.shares, self.shareids), required_shares)
+            l = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             some_shares = [ x[0] for x in l ]
             some_shareids = [ x[1] for x in l ]
             return _decode((some_shares, some_shareids))
@@ -70,10 +85,10 @@ class T(unittest.TestCase):
             log.msg("_decode_multiple")
             # make sure we can re-use the decoder object
             shares1 = random.sample(self.shares, required_shares)
-            sharesl1 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl1 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares1 = [ x[0] for x in sharesl1 ]
             shareids1 = [ x[1] for x in sharesl1 ]
-            sharesl2 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl2 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares2 = [ x[0] for x in sharesl2 ]
             shareids2 = [ x[1] for x in sharesl2 ]
             dec = CRSDecoder()

@@ -0,0 +1,52 @@
+"""
+Tests for allmydata.monitor.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from twisted.trial import unittest
+
+from allmydata.monitor import Monitor, OperationCancelledError
+
+
+class MonitorTests(unittest.TestCase):
+    """Tests for the Monitor class."""
+
+    def test_cancellation(self):
+        """The monitor can be cancelled."""
+        m = Monitor()
+        self.assertFalse(m.is_cancelled())
+        m.raise_if_cancelled()
+        m.cancel()
+        self.assertTrue(m.is_cancelled())
+        with self.assertRaises(OperationCancelledError):
+            m.raise_if_cancelled()
+
+    def test_status(self):
+        """The monitor can have its status set."""
+        m = Monitor()
+        self.assertEqual(m.get_status(), None)
+        m.set_status("discombobulated")
+        self.assertEqual(m.get_status(), "discombobulated")
+
+    def test_finish(self):
+        """The monitor can finish."""
+        m = Monitor()
+        self.assertFalse(m.is_finished())
+        d = m.when_done()
+        self.assertNoResult(d)
+
+        result = m.finish(300)
+        self.assertEqual(result, 300)
+        self.assertEqual(m.get_status(), 300)
+        self.assertTrue(m.is_finished())
+
+        d.addBoth(self.assertEqual, 300)
+        return d

[File diff suppressed because it is too large]

@@ -980,6 +980,8 @@ class CountingDataUploadable(upload.Data):
 class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):

+    timeout = 180
+
     def test_connections(self):
         self.basedir = "system/SystemTest/test_connections"
         d = self.set_up_nodes()

@@ -24,6 +24,7 @@ if PY2:
 # Keep these sorted alphabetically, to reduce merge conflicts:
 PORTED_MODULES = [
+    "allmydata.codec",
     "allmydata.crypto",
     "allmydata.crypto.aes",
     "allmydata.crypto.ed25519",
@@ -32,6 +33,8 @@ PORTED_MODULES = [
     "allmydata.crypto.util",
     "allmydata.hashtree",
     "allmydata.immutable.happiness_upload",
+    "allmydata.interfaces",
+    "allmydata.monitor",
     "allmydata.storage.crawler",
     "allmydata.storage.expirer",
     "allmydata.test.common_py3",
@@ -68,6 +71,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
+    "allmydata.test.test_codec",
     "allmydata.test.test_configutil",
     "allmydata.test.test_connection_status",
     "allmydata.test.test_crawler",
@@ -81,12 +85,14 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_humanreadable",
     "allmydata.test.test_iputil",
     "allmydata.test.test_log",
+    "allmydata.test.test_monitor",
     "allmydata.test.test_netstring",
     "allmydata.test.test_observer",
     "allmydata.test.test_pipeline",
     "allmydata.test.test_python3",
     "allmydata.test.test_spans",
     "allmydata.test.test_statistics",
+    "allmydata.test.test_storage",
     "allmydata.test.test_storage_web",
     "allmydata.test.test_time_format",
     "allmydata.test.test_uri",

@@ -1,31 +0,0 @@
-"""
-Implement a work-around for <https://github.com/twisted/nevow/issues/106>.
-"""
-
-from __future__ import (
-    print_function,
-    unicode_literals,
-    absolute_import,
-    division,
-)
-
-from nevow import inevow
-from twisted.internet import defer
-
-def renderHTTP(self, ctx):
-    request = inevow.IRequest(ctx)
-    if self.real_prepath_len is not None:
-        request.postpath = request.prepath + request.postpath
-        request.prepath = request.postpath[:self.real_prepath_len]
-        del request.postpath[:self.real_prepath_len]
-    result = defer.maybeDeferred(self.original.render, request).addCallback(
-        self._handle_NOT_DONE_YET, request)
-    return result
-
-def patch():
-    """
-    Monkey-patch the proposed fix into place.
-    """
-    from nevow.appserver import OldResourceAdapter
-    OldResourceAdapter.renderHTTP = renderHTTP

@@ -54,11 +54,6 @@ from .logs import (
     create_log_resources,
 )

-# Hotfix work-around https://github.com/twisted/nevow/issues/106
-from . import _nevow_106
-_nevow_106.patch()
-del _nevow_106
-
 SCHEME = b"tahoe-lafs"

 class IToken(ICredentials):