Merge branch 'master' into 3398.pre-commit

Author: Jason R. Coombs
Date:   2020-09-11 14:57:23 -04:00
Commit: 69762df708
48 changed files with 826 additions and 748 deletions


@ -211,7 +211,8 @@ jobs:
environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27-coverage"
# We don't do coverage since it makes PyPy far too slow:
TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
c-locale:


@ -68,6 +68,10 @@ export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
export PIP_NO_INDEX="1"
# Make output unbuffered, so progress reports from subunitv2-file get streamed
# and notify CircleCI that we're still alive.
export PYTHONUNBUFFERED=1
if [ "${ALLOWED_FAILURE}" = "yes" ]; then
alternative="true"
else

.gitignore (vendored; 1 line changed)

@ -9,6 +9,7 @@ venv*
*~
*.DS_Store
.*.kate-swp
*.bak
/build/
/support/

newsfragments/3374.minor (new empty file)
newsfragments/3392.minor (new empty file)
newsfragments/3393.minor (new empty file)
newsfragments/3394.minor (new empty file)
newsfragments/3396.minor (new empty file)
newsfragments/3397.minor (new empty file)
newsfragments/3401.minor (new empty file)
newsfragments/3403.minor (new empty file)


@ -116,6 +116,11 @@ install_requires = [
# know works on Python 2.7.
"eliot ~= 1.7",
# Pyrsistent 0.17.0 (which we use by way of Eliot) has dropped
# Python 2 entirely; stick to the version known to work for us.
# XXX: drop this bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3404
"pyrsistent < 0.17.0",
# A great way to define types of values.
# XXX: drop the upper bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3390
"attrs >= 18.2.0, < 20",
@ -361,7 +366,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
classifiers=trove_classifiers,
# We support Python 2.7, and we're working on support for 3.6 (the
# highest version that PyPy currently supports).
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7",
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*",
install_requires=install_requires,
extras_require={
# Duplicate the Twisted pywin32 dependency here. See


@ -2,7 +2,10 @@ import os, stat, time, weakref
from base64 import urlsafe_b64encode
from functools import partial
from errno import ENOENT, EPERM
from ConfigParser import NoSectionError
try:
from ConfigParser import NoSectionError
except ImportError:
from configparser import NoSectionError
from foolscap.furl import (
decode_furl,

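A note on the try/except import above: ConfigParser was renamed to configparser in Python 3, and the try/except fallback is the standard idiom for supporting both interpreters in one codebase. A minimal sketch of the pattern (the six.moves form is the equivalent one-liner used elsewhere in this commit):

# Py2/Py3 compatibility idiom: try the Python 2 module name first, fall
# back to the Python 3 name.
try:
    from ConfigParser import NoSectionError      # Python 2
except ImportError:
    from configparser import NoSectionError      # Python 3

# Equivalent one-liner via six, as used elsewhere in this commit:
#   from six.moves.configparser import NoSectionError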

@ -1,4 +1,16 @@
# -*- test-case-name: allmydata.test.test_encode_share -*-
"""
CRS encoding and decoding.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from zope.interface import implementer
from twisted.internet import defer
@ -9,7 +21,7 @@ import zfec
@implementer(ICodecEncoder)
class CRSEncoder(object):
ENCODER_TYPE = "crs"
ENCODER_TYPE = b"crs"
def set_params(self, data_size, required_shares, max_shares):
assert required_shares <= max_shares
@ -27,8 +39,8 @@ class CRSEncoder(object):
return (self.data_size, self.required_shares, self.max_shares)
def get_serialized_params(self):
return "%d-%d-%d" % (self.data_size, self.required_shares,
self.max_shares)
return b"%d-%d-%d" % (self.data_size, self.required_shares,
self.max_shares)
def get_block_size(self):
return self.share_size
@ -37,7 +49,7 @@ class CRSEncoder(object):
precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares)
if desired_share_ids is None:
desired_share_ids = range(self.max_shares)
desired_share_ids = list(range(self.max_shares))
for inshare in inshares:
assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares)
@ -71,5 +83,5 @@ class CRSDecoder(object):
return defer.succeed(data)
def parse_params(serializedparams):
pieces = serializedparams.split("-")
pieces = serializedparams.split(b"-")
return int(pieces[0]), int(pieces[1]), int(pieces[2])

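The b-prefixes above matter because on Python 3 "crs" and b"crs" are distinct, unequal values; the serialized parameters travel as bytes. A small illustrative round-trip with made-up values, relying on PEP 461 bytes formatting (available on Python 2 and on Python 3.5+):

params = (76, 25, 100)              # (data_size, required_shares, max_shares)
serialized = b"%d-%d-%d" % params   # bytes formatting: PEP 461
assert serialized == b"76-25-100"
assert tuple(int(p) for p in serialized.split(b"-")) == params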

@ -1,4 +1,6 @@
"""Directory Node implementation."""
from past.builtins import unicode
import time
from zope.interface import implementer
@ -227,7 +229,7 @@ def pack_children(childrenx, writekey, deep_immutable=False):
return _pack_normalized_children(children, writekey=writekey, deep_immutable=deep_immutable)
ZERO_LEN_NETSTR=netstring('')
ZERO_LEN_NETSTR=netstring(b'')
def _pack_normalized_children(children, writekey, deep_immutable=False):
"""Take a dict that maps:
children[unicode_nfc_name] = (IFileSystemNode, metadata_dict)


@ -559,7 +559,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
"""I represent a file handle to a particular file on an SFTP connection.
I am used only for short immutable files opened in read-only mode.
When I am created, the file contents start to be downloaded to memory.
self.async is used to delay read requests until the download has finished."""
self.async_ is used to delay read requests until the download has finished."""
def __init__(self, userpath, filenode, metadata):
PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=userpath)
@ -569,7 +569,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
userpath=userpath, filenode=filenode)
self.filenode = filenode
self.metadata = metadata
self.async = download_to_data(filenode)
self.async_ = download_to_data(filenode)
self.closed = False
def readChunk(self, offset, length):
@ -598,7 +598,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
else:
eventually_callback(d)(data[offset:offset+length]) # truncated if offset+length > len(data)
return data
self.async.addCallbacks(_read, eventually_errback(d))
self.async_.addCallbacks(_read, eventually_errback(d))
d.addBoth(_convert_error, request)
return d
@ -639,7 +639,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
storing the file contents. In order to allow write requests to be satisfied
immediately, there is effectively a FIFO queue between requests made to this
file handle, and requests to my OverwriteableFileConsumer. This queue is
implemented by the callback chain of self.async.
implemented by the callback chain of self.async_.
When first constructed, I am in an 'unopened' state that causes most
operations to be delayed until 'open' is called."""
@ -654,7 +654,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.flags = flags
self.close_notify = close_notify
self.convergence = convergence
self.async = defer.Deferred()
self.async_ = defer.Deferred()
# Creating or truncating the file is a change, but if FXF_EXCL is set, a zero-length file has already been created.
self.has_changed = (flags & (FXF_CREAT | FXF_TRUNC)) and not (flags & FXF_EXCL)
self.closed = False
@ -664,7 +664,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.filenode = None
self.metadata = None
# self.consumer should only be relied on in callbacks for self.async, since it might
# self.consumer should only be relied on in callbacks for self.async_, since it might
# not be set before then.
self.consumer = None
@ -691,7 +691,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.consumer = OverwriteableFileConsumer(0, tempfile_maker)
self.consumer.download_done("download not needed")
else:
self.async.addCallback(lambda ignored: filenode.get_best_readable_version())
self.async_.addCallback(lambda ignored: filenode.get_best_readable_version())
def _read(version):
if noisy: self.log("_read", level=NOISY)
@ -707,9 +707,9 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.consumer.download_done(res)
d.addBoth(_finished)
# It is correct to drop d here.
self.async.addCallback(_read)
self.async_.addCallback(_read)
eventually_callback(self.async)(None)
eventually_callback(self.async_)(None)
if noisy: self.log("open done", level=NOISY)
return self
@ -739,7 +739,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.log(".sync()", level=OPERATIONAL)
d = defer.Deferred()
self.async.addBoth(eventually_callback(d))
self.async_.addBoth(eventually_callback(d))
def _done(res):
if noisy: self.log("_done(%r) in .sync()" % (res,), level=NOISY)
return res
@ -765,7 +765,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
d2.addBoth(eventually_callback(d))
# It is correct to drop d2 here.
return None
self.async.addCallbacks(_read, eventually_errback(d))
self.async_.addCallbacks(_read, eventually_errback(d))
d.addBoth(_convert_error, request)
return d
@ -802,8 +802,8 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.consumer.overwrite(write_offset, data)
if noisy: self.log("overwrite done", level=NOISY)
return None
self.async.addCallback(_write)
# don't addErrback to self.async, just allow subsequent async ops to fail.
self.async_.addCallback(_write)
# don't addErrback to self.async_, just allow subsequent async ops to fail.
return defer.succeed(None)
def _do_close(self, res, d=None):
@ -812,7 +812,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
if self.consumer:
status = self.consumer.close()
# We must close_notify before re-firing self.async.
# We must close_notify before re-firing self.async_.
if self.close_notify:
self.close_notify(self.userpath, self.parent, self.childname, self)
@ -841,7 +841,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
# download.) Any reads that depended on file content that could not be downloaded
# will have failed. It is important that we don't close the consumer until
# previous read operations have completed.
self.async.addBoth(self._do_close)
self.async_.addBoth(self._do_close)
return defer.succeed(None)
# We must capture the abandoned, parent, and childname variables synchronously
@ -875,16 +875,16 @@ class GeneralSFTPFile(PrefixingLogMixin):
return d2
# If the file has been abandoned, we don't want the close operation to get "stuck",
# even if self.async fails to re-fire. Completing the close independently of self.async
# even if self.async_ fails to re-fire. Completing the close independently of self.async_
# in that case should ensure that dropping an ssh connection is sufficient to abandon
# any heisenfiles that were not explicitly closed in that connection.
if abandoned or not has_changed:
d = defer.succeed(None)
self.async.addBoth(self._do_close)
self.async_.addBoth(self._do_close)
else:
d = defer.Deferred()
self.async.addCallback(_commit)
self.async.addBoth(self._do_close, d)
self.async_.addCallback(_commit)
self.async_.addBoth(self._do_close, d)
d.addBoth(_convert_error, request)
return d
@ -908,7 +908,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
attrs = _populate_attrs(self.filenode, self.metadata, size=self.consumer.get_current_size())
eventually_callback(d)(attrs)
return None
self.async.addCallbacks(_get, eventually_errback(d))
self.async_.addCallbacks(_get, eventually_errback(d))
d.addBoth(_convert_error, request)
return d
@ -946,7 +946,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
self.consumer.set_current_size(size)
eventually_callback(d)(None)
return None
self.async.addCallbacks(_set, eventually_errback(d))
self.async_.addCallbacks(_set, eventually_errback(d))
d.addBoth(_convert_error, request)
return d

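Every self.async in this file becomes self.async_ because `async` is a reserved keyword as of Python 3.7 (and produces a DeprecationWarning on 3.6), so the old spelling is a SyntaxError there. The trailing underscore is PEP 8's convention for names that collide with keywords. A minimal illustration, with a hypothetical class standing in for the file handles above:

from twisted.internet import defer

class FileHandle(object):            # hypothetical stand-in
    def __init__(self):
        # `self.async = defer.Deferred()` is a SyntaxError on Python 3.7+;
        # PEP 8 recommends a trailing underscore for keyword clashes.
        self.async_ = defer.Deferred()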

@ -4,7 +4,7 @@ from foolscap.api import eventually
from allmydata.interfaces import NotEnoughSharesError, NoSharesError
from allmydata.util import log
from allmydata.util.dictutil import DictOfSets
from common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
from .common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
BadSegmentNumberError
class SegmentFetcher(object):

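This one-line change, repeated across the downloader modules below, is required by PEP 328: Python 3 removed implicit relative imports, so a bare `from common import ...` inside a package would look for a top-level `common` module. A sketch of the two forms:

# Inside a package module (e.g. the downloader package this hunk touches):
#
#   from common import OVERDUE, COMPLETE    # implicit relative import;
#                                           # works on Python 2 only
#   from .common import OVERDUE, COMPLETE   # explicit relative import
#                                           # (PEP 328); works on 2 and 3
#
# Modules ported in this commit also opt in to the Python 3 semantics on
# Python 2 with `from __future__ import absolute_import`.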

@ -5,7 +5,7 @@ from foolscap.api import eventually
from allmydata.util import base32, log
from twisted.internet import reactor
from share import Share, CommonShare
from .share import Share, CommonShare
def incidentally(res, f, *args, **kwargs):
"""Add me to a Deferred chain like this:


@ -13,10 +13,10 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
NotEnoughHashesError
# local imports
from finder import ShareFinder
from fetcher import SegmentFetcher
from segmentation import Segmentation
from common import BadCiphertextHashError
from .finder import ShareFinder
from .fetcher import SegmentFetcher
from .segmentation import Segmentation
from .common import BadCiphertextHashError
class IDownloadStatusHandlingConsumer(Interface):
def set_download_status_read_event(read_ev):


@ -9,7 +9,7 @@ from allmydata.util import log
from allmydata.util.spans import overlap
from allmydata.interfaces import DownloadStopped
from common import BadSegmentNumberError, WrongSegmentError
from .common import BadSegmentNumberError, WrongSegmentError
@implementer(IPushProducer)
class Segmentation(object):


@ -13,7 +13,7 @@ from allmydata.hashtree import IncompleteHashTree, BadHashError, \
from allmydata.immutable.layout import make_write_bucket_proxy
from allmydata.util.observer import EventStreamObserver
from common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
from .common import COMPLETE, CORRUPT, DEAD, BADSEGNUM
class LayoutInvalid(Exception):


@ -171,7 +171,7 @@ class WriteBucketProxy(object):
def put_block(self, segmentnum, data):
offset = self._offsets['data'] + segmentnum * self._block_size
assert offset + len(data) <= self._offsets['uri_extension']
assert isinstance(data, str)
assert isinstance(data, bytes)
if segmentnum < self._num_segments-1:
precondition(len(data) == self._block_size,
len(data), self._block_size)
@ -185,7 +185,7 @@ class WriteBucketProxy(object):
def put_crypttext_hashes(self, hashes):
offset = self._offsets['crypttext_hash_tree']
assert isinstance(hashes, list)
data = "".join(hashes)
data = b"".join(hashes)
precondition(len(data) == self._segment_hash_size,
len(data), self._segment_hash_size)
precondition(offset + len(data) <= self._offsets['block_hashes'],
@ -196,7 +196,7 @@ class WriteBucketProxy(object):
def put_block_hashes(self, blockhashes):
offset = self._offsets['block_hashes']
assert isinstance(blockhashes, list)
data = "".join(blockhashes)
data = b"".join(blockhashes)
precondition(len(data) == self._segment_hash_size,
len(data), self._segment_hash_size)
precondition(offset + len(data) <= self._offsets['share_hashes'],
@ -209,7 +209,7 @@ class WriteBucketProxy(object):
# as 2+32=34 bytes each
offset = self._offsets['share_hashes']
assert isinstance(sharehashes, list)
data = "".join([struct.pack(">H", hashnum) + hashvalue
data = b"".join([struct.pack(">H", hashnum) + hashvalue
for hashnum,hashvalue in sharehashes])
precondition(len(data) == self._share_hashtree_size,
len(data), self._share_hashtree_size)
@ -220,7 +220,7 @@ class WriteBucketProxy(object):
def put_uri_extension(self, data):
offset = self._offsets['uri_extension']
assert isinstance(data, str)
assert isinstance(data, bytes)
precondition(len(data) <= self._uri_extension_size_max,
len(data), self._uri_extension_size_max)
length = struct.pack(self.fieldstruct, len(data))

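The joins above must start from b"" because joining bytes with a text "" raises TypeError on Python 3. A self-contained sketch of the share-hash packing that put_share_hashes performs, with made-up values (each entry is a 2-byte big-endian hash number plus a 32-byte hash):

import struct

sharehashes = [(0, b"\x11" * 32), (3, b"\x22" * 32)]   # illustrative values
data = b"".join([struct.pack(">H", hashnum) + hashvalue
                 for hashnum, hashvalue in sharehashes])
assert isinstance(data, bytes)
assert len(data) == len(sharehashes) * (2 + 32)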

@ -1,3 +1,5 @@
from past.builtins import long
import os, time, weakref, itertools
from zope.interface import implementer
from twisted.python import failure
@ -26,7 +28,7 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
from allmydata.immutable import layout
from six.moves import cStringIO as StringIO
from happiness_upload import share_placement, calculate_happiness
from .happiness_upload import share_placement, calculate_happiness
from ..util.eliotutil import (
log_call_deferred,


@ -1,3 +1,19 @@
"""
Interfaces for Tahoe-LAFS.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# Don't import object/str/dict/etc. types, so we don't break any
# interfaces. Not importing open() because it triggers bogus flake8 error.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, range, max, min # noqa: F401
from past.builtins import long
from zope.interface import Interface, Attribute
@ -58,7 +74,7 @@ class RIBucketReader(RemoteInterface):
def read(offset=Offset, length=ReadSize):
return ShareData
def advise_corrupt_share(reason=str):
def advise_corrupt_share(reason=bytes):
"""Clients who discover hash failures in shares that they have
downloaded from me will use this method to inform me about the
failures. I will record their concern so that my operator can
@ -71,7 +87,7 @@ class RIBucketReader(RemoteInterface):
"""
TestVector = ListOf(TupleOf(Offset, ReadSize, str, str))
TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes))
# elements are (offset, length, operator, specimen)
# operator is one of "lt, le, eq, ne, ge, gt"
# nop always passes and is used to fetch data while writing.
@ -89,13 +105,13 @@ ReadData = ListOf(ShareData)
class RIStorageServer(RemoteInterface):
__remote_name__ = "RIStorageServer.tahoe.allmydata.com"
__remote_name__ = b"RIStorageServer.tahoe.allmydata.com"
def get_version():
"""
Return a dictionary of version information.
"""
return DictOf(str, Any())
return DictOf(bytes, Any())
def allocate_buckets(storage_index=StorageIndex,
renew_secret=LeaseRenewSecret,
@ -277,8 +293,8 @@ class RIStorageServer(RemoteInterface):
"""
return TupleOf(bool, DictOf(int, ReadData))
def advise_corrupt_share(share_type=str, storage_index=StorageIndex,
shnum=int, reason=str):
def advise_corrupt_share(share_type=bytes, storage_index=StorageIndex,
shnum=int, reason=bytes):
"""Clients who discover hash failures in shares that they have
downloaded from me will use this method to inform me about the
failures. I will record their concern so that my operator can
@ -2859,7 +2875,7 @@ UploadResults = Any() #DictOf(str, str)
class RIEncryptedUploadable(RemoteInterface):
__remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com"
__remote_name__ = b"RIEncryptedUploadable.tahoe.allmydata.com"
def get_size():
return Offset
@ -2875,7 +2891,7 @@ class RIEncryptedUploadable(RemoteInterface):
class RICHKUploadHelper(RemoteInterface):
__remote_name__ = "RIUploadHelper.tahoe.allmydata.com"
__remote_name__ = b"RIUploadHelper.tahoe.allmydata.com"
def get_version():
"""
@ -2888,7 +2904,7 @@ class RICHKUploadHelper(RemoteInterface):
class RIHelper(RemoteInterface):
__remote_name__ = "RIHelper.tahoe.allmydata.com"
__remote_name__ = b"RIHelper.tahoe.allmydata.com"
def get_version():
"""
@ -2915,7 +2931,7 @@ class RIHelper(RemoteInterface):
class RIStatsProvider(RemoteInterface):
__remote_name__ = "RIStatsProvider.tahoe.allmydata.com"
__remote_name__ = b"RIStatsProvider.tahoe.allmydata.com"
"""
Provides access to statistics and monitoring information.
"""
@ -2932,7 +2948,7 @@ class RIStatsProvider(RemoteInterface):
class RIStatsGatherer(RemoteInterface):
__remote_name__ = "RIStatsGatherer.tahoe.allmydata.com"
__remote_name__ = b"RIStatsGatherer.tahoe.allmydata.com"
"""
Provides a monitoring service for centralised collection of stats
"""


@ -1,7 +1,21 @@
"""
Manage status of long-running operations.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from zope.interface import Interface, implementer
from allmydata.util import observer
class IMonitor(Interface):
"""I manage status, progress, and cancellation for long-running operations.


@ -1,3 +1,4 @@
from past.utils import old_div
import struct
from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError, \
@ -180,11 +181,11 @@ def pack_offsets(verification_key_length, signature_length,
def pack_share(prefix, verification_key, signature,
share_hash_chain, block_hash_tree,
share_data, encprivkey):
share_hash_chain_s = "".join([struct.pack(">H32s", i, share_hash_chain[i])
for i in sorted(share_hash_chain.keys())])
share_hash_chain_s = b"".join([struct.pack(">H32s", i, share_hash_chain[i])
for i in sorted(share_hash_chain.keys())])
for h in block_hash_tree:
assert len(h) == 32
block_hash_tree_s = "".join(block_hash_tree)
block_hash_tree_s = b"".join(block_hash_tree)
offsets = pack_offsets(len(verification_key),
len(signature),
@ -192,14 +193,14 @@ def pack_share(prefix, verification_key, signature,
len(block_hash_tree_s),
len(share_data),
len(encprivkey))
final_share = "".join([prefix,
offsets,
verification_key,
signature,
share_hash_chain_s,
block_hash_tree_s,
share_data,
encprivkey])
final_share = b"".join([prefix,
offsets,
verification_key,
signature,
share_hash_chain_s,
block_hash_tree_s,
share_data,
encprivkey])
return final_share
def pack_prefix(seqnum, root_hash, IV,
@ -255,7 +256,7 @@ class SDMFSlotWriteProxy(object):
self._required_shares)
assert expected_segment_size == segment_size
self._block_size = self._segment_size / self._required_shares
self._block_size = old_div(self._segment_size, self._required_shares)
# This is meant to mimic how SDMF files were built before MDMF
# entered the picture: we generate each share in its entirety,
@ -296,7 +297,7 @@ class SDMFSlotWriteProxy(object):
salt)
else:
checkstring = checkstring_or_seqnum
self._testvs = [(0, len(checkstring), "eq", checkstring)]
self._testvs = [(0, len(checkstring), b"eq", checkstring)]
def get_checkstring(self):
@ -306,7 +307,7 @@ class SDMFSlotWriteProxy(object):
"""
if self._testvs:
return self._testvs[0][3]
return ""
return b""
def put_block(self, data, segnum, salt):
@ -343,7 +344,7 @@ class SDMFSlotWriteProxy(object):
assert len(h) == HASH_SIZE
# serialize the blockhashes, then set them.
blockhashes_s = "".join(blockhashes)
blockhashes_s = b"".join(blockhashes)
self._share_pieces['block_hash_tree'] = blockhashes_s
return defer.succeed(None)
@ -354,12 +355,12 @@ class SDMFSlotWriteProxy(object):
Add the share hash chain to the share.
"""
assert isinstance(sharehashes, dict)
for h in sharehashes.itervalues():
for h in sharehashes.values():
assert len(h) == HASH_SIZE
# serialize the sharehashes, then set them.
sharehashes_s = "".join([struct.pack(">H32s", i, sharehashes[i])
for i in sorted(sharehashes.keys())])
sharehashes_s = b"".join([struct.pack(">H32s", i, sharehashes[i])
for i in sorted(sharehashes.keys())])
self._share_pieces['share_hash_chain'] = sharehashes_s
return defer.succeed(None)
@ -383,7 +384,7 @@ class SDMFSlotWriteProxy(object):
assert len(salt) == SALT_SIZE
self._share_pieces['salt'] = salt
self._share_pieces['sharedata'] = ""
self._share_pieces['sharedata'] = b""
def get_signable(self):
@ -519,14 +520,14 @@ class SDMFSlotWriteProxy(object):
# to the remote server in one write.
offsets = self._pack_offsets()
prefix = self.get_signable()
final_share = "".join([prefix,
offsets,
self._share_pieces['verification_key'],
self._share_pieces['signature'],
self._share_pieces['share_hash_chain'],
self._share_pieces['block_hash_tree'],
self._share_pieces['sharedata'],
self._share_pieces['encprivkey']])
final_share = b"".join([prefix,
offsets,
self._share_pieces['verification_key'],
self._share_pieces['signature'],
self._share_pieces['share_hash_chain'],
self._share_pieces['block_hash_tree'],
self._share_pieces['sharedata'],
self._share_pieces['encprivkey']])
# Our only data vector is going to be writing the final share,
# in its entirety.
@ -537,7 +538,7 @@ class SDMFSlotWriteProxy(object):
# yet, so we assume that we are writing a new share, and set
# a test vector that will allow a new share to be written.
self._testvs = []
self._testvs.append(tuple([0, 1, "eq", ""]))
self._testvs.append(tuple([0, 1, b"eq", b""]))
tw_vectors = {}
tw_vectors[self.shnum] = (self._testvs, datavs, None)
@ -788,7 +789,7 @@ class MDMFSlotWriteProxy(object):
# and also because it provides a useful amount of bounds checking.
self._num_segments = mathutil.div_ceil(self._data_length,
self._segment_size)
self._block_size = self._segment_size / self._required_shares
self._block_size = old_div(self._segment_size, self._required_shares)
# We also calculate the share size, to help us with block
# constraints later.
tail_size = self._data_length % self._segment_size
@ -797,7 +798,7 @@ class MDMFSlotWriteProxy(object):
else:
self._tail_block_size = mathutil.next_multiple(tail_size,
self._required_shares)
self._tail_block_size /= self._required_shares
self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
# We already know where the sharedata starts; right after the end
# of the header (which is defined as the signable part + the offsets)
@ -868,7 +869,7 @@ class MDMFSlotWriteProxy(object):
else:
checkstring = seqnum_or_checkstring
if checkstring == "":
if checkstring == b"":
# We special-case this, since len("") = 0, but we need
# length of 1 for the case of an empty share to work on the
# storage server, which is what a checkstring that is the
@ -876,7 +877,7 @@ class MDMFSlotWriteProxy(object):
self._testvs = []
else:
self._testvs = []
self._testvs.append((0, len(checkstring), "eq", checkstring))
self._testvs.append((0, len(checkstring), b"eq", checkstring))
def __repr__(self):
@ -893,7 +894,7 @@ class MDMFSlotWriteProxy(object):
if self._root_hash:
roothash = self._root_hash
else:
roothash = "\x00" * 32
roothash = b"\x00" * 32
return struct.pack(MDMFCHECKSTRING,
1,
self._seqnum,
@ -964,7 +965,7 @@ class MDMFSlotWriteProxy(object):
assert isinstance(blockhashes, list)
blockhashes_s = "".join(blockhashes)
blockhashes_s = b"".join(blockhashes)
self._offsets['EOF'] = self._offsets['block_hash_tree'] + len(blockhashes_s)
self._writevs.append(tuple([self._offsets['block_hash_tree'],
@ -998,7 +999,7 @@ class MDMFSlotWriteProxy(object):
if "verification_key" in self._offsets:
raise LayoutInvalid("You must write the share hash chain "
"before you write the signature")
sharehashes_s = "".join([struct.pack(">H32s", i, sharehashes[i])
sharehashes_s = b"".join([struct.pack(">H32s", i, sharehashes[i])
for i in sorted(sharehashes.keys())])
self._offsets['signature'] = self._offsets['share_hash_chain'] + \
len(sharehashes_s)
@ -1149,7 +1150,7 @@ class MDMFSlotWriteProxy(object):
tw_vectors = {}
if not self._testvs:
self._testvs = []
self._testvs.append(tuple([0, 1, "eq", ""]))
self._testvs.append(tuple([0, 1, b"eq", b""]))
if not self._written:
# Write a new checkstring to the share when we write it, so
# that we have something to check later.
@ -1157,7 +1158,7 @@ class MDMFSlotWriteProxy(object):
datavs.append((0, new_checkstring))
def _first_write():
self._written = True
self._testvs = [(0, len(new_checkstring), "eq", new_checkstring)]
self._testvs = [(0, len(new_checkstring), b"eq", new_checkstring)]
on_success = _first_write
tw_vectors[self.shnum] = (self._testvs, datavs, None)
d = self._storage_server.slot_testv_and_readv_and_writev(
@ -1194,7 +1195,7 @@ class MDMFSlotReadProxy(object):
storage_server,
storage_index,
shnum,
data="",
data=b"",
data_is_everything=False):
# Start the initialization process.
self._storage_server = storage_server
@ -1238,7 +1239,7 @@ class MDMFSlotReadProxy(object):
# None if there isn't any cached data, but the way we index the
# cached data requires a string, so convert None to "".
if self._data == None:
self._data = ""
self._data = b""
def _maybe_fetch_offsets_and_header(self, force_remote=False):
@ -1317,7 +1318,7 @@ class MDMFSlotReadProxy(object):
self._segment_size = segsize
self._data_length = datalen
self._block_size = self._segment_size / self._required_shares
self._block_size = old_div(self._segment_size, self._required_shares)
# We can upload empty files, and need to account for this fact
# so as to avoid zero-division and zero-modulo errors.
if datalen > 0:
@ -1329,7 +1330,7 @@ class MDMFSlotReadProxy(object):
else:
self._tail_block_size = mathutil.next_multiple(tail_size,
self._required_shares)
self._tail_block_size /= self._required_shares
self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
return encoding_parameters
@ -1416,7 +1417,7 @@ class MDMFSlotReadProxy(object):
# when we fetched the header
data = results[self.shnum]
if not data:
data = ""
data = b""
else:
if len(data) != 1:
raise BadShareError("got %d vectors, not 1" % len(data))
@ -1425,7 +1426,7 @@ class MDMFSlotReadProxy(object):
else:
data = results[self.shnum]
if not data:
salt = data = ""
salt = data = b""
else:
salt_and_data = results[self.shnum][0]
salt = salt_and_data[:SALT_SIZE]
@ -1743,7 +1744,7 @@ class MDMFSlotReadProxy(object):
def _read(self, readvs, force_remote=False):
unsatisfiable = filter(lambda x: x[0] + x[1] > len(self._data), readvs)
unsatisfiable = list(filter(lambda x: x[0] + x[1] > len(self._data), readvs))
# TODO: It's entirely possible to tweak this so that it just
# fulfills the requests that it can, and not demand that all
# requests are satisfiable before running it.

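old_div from the `future` package keeps Python 2 division semantics under Python 3: `/` floor-divides two integers on Python 2 but returns a float on Python 3, and the block-size arithmetic above needs the integer result. A quick demonstration:

from past.utils import old_div

assert old_div(7, 2) == 3          # integer operands: floor division,
                                   # as Python 2's `/` behaved
assert old_div(7.0, 2) == 3.5      # non-integer operands: true division
block_size = old_div(131072, 3)    # e.g. segment_size / required_shares
assert block_size == 43690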

@ -2,12 +2,14 @@
This module contains classes and functions to implement and manage
a node for Tahoe-LAFS.
"""
from past.builtins import unicode
import datetime
import os.path
import re
import types
import errno
import ConfigParser
from six.moves import configparser
import tempfile
from io import BytesIO
from base64 import b32decode, b32encode
@ -67,7 +69,7 @@ def _common_valid_config():
# Add our application versions to the data that Foolscap's LogPublisher
# reports.
for thing, things_version in get_package_versions().iteritems():
for thing, things_version in get_package_versions().items():
app_versions.add_version(thing, str(things_version))
# group 1 will be addr (dotted quad string), group 3 if any will be portnum (string)
@ -180,7 +182,7 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):
# (try to) read the main config file
config_fname = os.path.join(basedir, "tahoe.cfg")
parser = ConfigParser.SafeConfigParser()
parser = configparser.SafeConfigParser()
try:
parser = configutil.get_config(config_fname)
except EnvironmentError as e:
@ -203,7 +205,7 @@ def config_from_string(basedir, portnumfile, config_str, _valid_config=None):
_valid_config = _common_valid_config()
# load configuration from in-memory string
parser = ConfigParser.SafeConfigParser()
parser = configparser.SafeConfigParser()
parser.readfp(BytesIO(config_str))
fname = "<in-memory>"
@ -272,7 +274,10 @@ class _Config(object):
self.config = configparser
nickname_utf8 = self.get_config("node", "nickname", "<unspecified>")
self.nickname = nickname_utf8.decode("utf-8")
if isinstance(nickname_utf8, bytes): # Python 2
self.nickname = nickname_utf8.decode("utf-8")
else:
self.nickname = nickname_utf8
assert type(self.nickname) is unicode
def validate(self, valid_config_sections):
@ -295,7 +300,7 @@ class _Config(object):
def items(self, section, default=_None):
try:
return self.config.items(section)
except ConfigParser.NoSectionError:
except configparser.NoSectionError:
if default is _None:
raise
return default
@ -310,7 +315,7 @@ class _Config(object):
raise UnescapedHashError(section, option, item)
return item
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
except (configparser.NoOptionError, configparser.NoSectionError):
if default is _None:
raise MissingConfigEntry(
"{} is missing the [{}]{} entry".format(

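The nickname change above works around ConfigParser returning UTF-8 bytes on Python 2 but text on Python 3. A sketch of that normalization as a standalone helper (the helper name is illustrative, not part of the codebase):

def ensure_text(value, encoding="utf-8"):
    """Return `value` as text, decoding it if Python 2's ConfigParser
    handed back UTF-8 bytes instead of unicode."""
    if isinstance(value, bytes):   # Python 2 path
        return value.decode(encoding)
    return value                   # Python 3 path

assert ensure_text(b"storage-node-1") == u"storage-node-1"
assert ensure_text(u"storage-node-1") == u"storage-node-1"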

@ -2,7 +2,7 @@ from __future__ import print_function
import os, sys, urllib, textwrap
import codecs
from ConfigParser import NoSectionError
from six.moves.configparser import NoSectionError
from os.path import join
from twisted.python import usage
from allmydata.util.assertutil import precondition


@ -2,7 +2,8 @@ from __future__ import print_function
import os
from six.moves import cStringIO as StringIO
import urlparse, httplib
from six.moves import urllib, http_client
import six
import allmydata # for __full_version__
from allmydata.util.encodingutil import quote_output
@ -12,9 +13,9 @@ from socket import error as socket_error
# copied from twisted/web/client.py
def parse_url(url, defaultPort=None):
url = url.strip()
parsed = urlparse.urlparse(url)
parsed = urllib.parse.urlparse(url)
scheme = parsed[0]
path = urlparse.urlunparse(('','')+parsed[2:])
path = urllib.parse.urlunparse(('','')+parsed[2:])
if defaultPort is None:
if scheme == 'https':
defaultPort = 443
@ -40,7 +41,7 @@ class BadResponse(object):
def do_http(method, url, body=""):
if isinstance(body, str):
body = StringIO(body)
elif isinstance(body, unicode):
elif isinstance(body, six.text_type):
raise TypeError("do_http body must be a bytestring, not unicode")
else:
# We must give a Content-Length header to twisted.web, otherwise it
@ -51,9 +52,9 @@ def do_http(method, url, body=""):
assert body.read
scheme, host, port, path = parse_url(url)
if scheme == "http":
c = httplib.HTTPConnection(host, port)
c = http_client.HTTPConnection(host, port)
elif scheme == "https":
c = httplib.HTTPSConnection(host, port)
c = http_client.HTTPSConnection(host, port)
else:
raise ValueError("unknown scheme '%s', need http or https" % scheme)
c.putrequest(method, path)

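six.moves papers over the stdlib renames used here: Python 2's urlparse and httplib became urllib.parse and http.client in Python 3. A runnable sketch of the rewritten parse step (the URL is made up; constructing the connection object does not open a socket):

from six.moves import urllib, http_client

url = "https://example.invalid:3456/uri/"
parsed = urllib.parse.urlparse(url)
scheme = parsed[0]
path = urllib.parse.urlunparse(("", "") + parsed[2:])
assert (scheme, path) == ("https", "/uri/")

# httplib.HTTPSConnection on Python 2, http.client.HTTPSConnection on 3:
c = http_client.HTTPSConnection("example.invalid", 3456)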

@ -1,17 +1,19 @@
import sys
import six
from allmydata.util.assertutil import precondition
from allmydata.util.fileutil import abspath_expanduser_unicode
_default_nodedir = None
if sys.platform == 'win32':
from allmydata.windows import registry
path = registry.get_base_dir_path()
if path:
precondition(isinstance(path, unicode), path)
precondition(isinstance(path, six.text_type), path)
_default_nodedir = abspath_expanduser_unicode(path)
if _default_nodedir is None:
path = abspath_expanduser_unicode(u"~/.tahoe")
precondition(isinstance(path, unicode), path)
precondition(isinstance(path, six.text_type), path)
_default_nodedir = path


@ -2,6 +2,7 @@ from __future__ import print_function
import os, sys
from six.moves import StringIO
import six
from twisted.python import usage
from twisted.internet import defer, task, threads
@ -71,8 +72,8 @@ class Options(usage.Options):
]
optParameters = [
["node-directory", "d", None, NODEDIR_HELP],
["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", unicode],
["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", unicode],
["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
]
def opt_version(self):
@ -180,7 +181,9 @@ def _maybe_enable_eliot_logging(options, reactor):
return options
def run():
assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3. Please use Python 2.7.x."
# TODO(3035): Remove tox-check when error becomes a warning
if 'TOX_ENV_NAME' not in os.environ:
assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3. Please use Python 2.7.x."
if sys.platform == "win32":
from allmydata.windows.fixups import initialize


@ -1,3 +1,5 @@
from future.utils import bytes_to_native_str
import os, stat, struct, time
from foolscap.api import Referenceable
@ -85,7 +87,7 @@ class ShareFile(object):
seekpos = self._data_offset+offset
actuallength = max(0, min(length, self._lease_offset-seekpos))
if actuallength == 0:
return ""
return b""
with open(self.home, 'rb') as f:
f.seek(seekpos)
return f.read(actuallength)
@ -298,7 +300,9 @@ class BucketReader(Referenceable):
def __repr__(self):
return "<%s %s %s>" % (self.__class__.__name__,
base32.b2a(self.storage_index[:8])[:12],
bytes_to_native_str(
base32.b2a(self.storage_index[:8])[:12]
),
self.shnum)
def remote_read(self, offset, length):
@ -309,7 +313,7 @@ class BucketReader(Referenceable):
return data
def remote_advise_corrupt_share(self, reason):
return self.ss.remote_advise_corrupt_share("immutable",
return self.ss.remote_advise_corrupt_share(b"immutable",
self.storage_index,
self.shnum,
reason)

View File

@ -113,7 +113,7 @@ class MutableShareFile(object):
# start beyond the end of the data return an empty string.
length = max(0, data_length-offset)
if length == 0:
return ""
return b""
precondition(offset+length <= data_length)
f.seek(self.DATA_OFFSET+offset)
data = f.read(length)
@ -421,18 +421,18 @@ class MutableShareFile(object):
# self._change_container_size() here.
def testv_compare(a, op, b):
assert op in ("lt", "le", "eq", "ne", "ge", "gt")
if op == "lt":
assert op in (b"lt", b"le", b"eq", b"ne", b"ge", b"gt")
if op == b"lt":
return a < b
if op == "le":
if op == b"le":
return a <= b
if op == "eq":
if op == b"eq":
return a == b
if op == "ne":
if op == b"ne":
return a != b
if op == "ge":
if op == b"ge":
return a >= b
if op == "gt":
if op == b"gt":
return a > b
# never reached
@ -441,7 +441,7 @@ class EmptyShare(object):
def check_testv(self, testv):
test_good = True
for (offset, length, operator, specimen) in testv:
data = ""
data = b""
if not testv_compare(data, operator, specimen):
test_good = False
break

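A usage sketch for testv_compare as redefined above: the operator tokens arrive over the wire inside test vectors, so on Python 3 they are bytes, and bytes specimens compare lexicographically just as Python 2 strings did:

# Assuming testv_compare as defined in the hunk above:
assert testv_compare(b"aaa", b"lt", b"bbb")       # lexicographic compare
assert testv_compare(b"", b"eq", b"")             # the empty "new share" test
assert not testv_compare(b"old", b"ne", b"old")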

@ -1,3 +1,4 @@
from future.utils import bytes_to_native_str
import os, re, struct, time
import weakref
import six
@ -51,6 +52,7 @@ class StorageServer(service.MultiService, Referenceable):
service.MultiService.__init__(self)
assert isinstance(nodeid, bytes)
assert len(nodeid) == 20
assert isinstance(nodeid, bytes)
self.my_nodeid = nodeid
self.storedir = storedir
sharedir = os.path.join(storedir, "shares")
@ -398,7 +400,7 @@ class StorageServer(service.MultiService, Referenceable):
# since all shares get the same lease data, we just grab the leases
# from the first share
try:
shnum, filename = self._get_bucket_shares(storage_index).next()
shnum, filename = next(self._get_bucket_shares(storage_index))
sf = ShareFile(filename)
return sf.get_leases()
except StopIteration:
@ -676,6 +678,10 @@ class StorageServer(service.MultiService, Referenceable):
def remote_advise_corrupt_share(self, share_type, storage_index, shnum,
reason):
# This is a remote API, I believe, so this has to be bytes for legacy
# protocol backwards compatibility reasons.
assert isinstance(share_type, bytes)
assert isinstance(reason, bytes)
fileutil.make_dirs(self.corruption_advisory_dir)
now = time_format.iso_utc(sep="T")
si_s = si_b2a(storage_index)
@ -684,11 +690,11 @@ class StorageServer(service.MultiService, Referenceable):
"%s--%s-%d" % (now, si_s, shnum)).replace(":","")
with open(fn, "w") as f:
f.write("report: Share Corruption\n")
f.write("type: %s\n" % share_type)
f.write("storage_index: %s\n" % si_s)
f.write("type: %s\n" % bytes_to_native_str(share_type))
f.write("storage_index: %s\n" % bytes_to_native_str(si_s))
f.write("share_number: %d\n" % shnum)
f.write("\n")
f.write(reason)
f.write(bytes_to_native_str(reason))
f.write("\n")
log.msg(format=("client claims corruption in (%(share_type)s) " +
"%(si)s-%(shnum)d: %(reason)s"),

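future.utils.bytes_to_native_str returns whatever str natively is: on Python 2 the bytes pass through unchanged, on Python 3 they are decoded (UTF-8 by default). That keeps %s-style report text clean on both interpreters:

from future.utils import bytes_to_native_str

share_type = b"immutable"
line = "type: %s\n" % bytes_to_native_str(share_type)
assert line == "type: immutable\n"    # no b'...' repr leaking into the report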

@ -30,9 +30,12 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
import re, time, hashlib
from ConfigParser import (
NoSectionError,
)
try:
from ConfigParser import (
NoSectionError,
)
except ImportError:
from configparser import NoSectionError
import attr
from zope.interface import (
Attribute,
@ -534,11 +537,11 @@ class _NullStorage(object):
which we can't communicate.
"""
nickname = ""
permutation_seed = hashlib.sha256("").digest()
tubid = hashlib.sha256("").digest()
permutation_seed = hashlib.sha256(b"").digest()
tubid = hashlib.sha256(b"").digest()
storage_server = None
lease_seed = hashlib.sha256("").digest()
lease_seed = hashlib.sha256(b"").digest()
name = "<unsupported>"
longname = "<storage with unsupported protocol>"


@ -809,7 +809,7 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
# Simulate a connection error
def _socket_error(*args, **kwargs):
raise socket_error('test error')
self.patch(allmydata.scripts.common_http.httplib.HTTPConnection,
self.patch(allmydata.scripts.common_http.http_client.HTTPConnection,
"endheaders", _socket_error)
d = self.do_cli("mkdir")


@ -52,7 +52,6 @@ from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet.interfaces import IPullProducer
from twisted.python import failure
from twisted.python.filepath import FilePath
from twisted.application import service
from twisted.web.error import Error as WebError
from twisted.internet.interfaces import (
IStreamServerEndpointStringParser,
@ -88,6 +87,8 @@ from ..crypto import (
from .eliotutil import (
EliotLoggedRunTest,
)
# Backwards compatibility imports:
from .common_py3 import LoggingServiceParent, ShouldFailMixin # noqa: F401
TEST_RSA_KEY_SIZE = 522
@ -780,53 +781,8 @@ def create_mutable_filenode(contents, mdmf=False, all_contents=None):
return filenode
class LoggingServiceParent(service.MultiService):
def log(self, *args, **kwargs):
return log.msg(*args, **kwargs)
TEST_DATA="\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)
class ShouldFailMixin(object):
def shouldFail(self, expected_failure, which, substring,
callable, *args, **kwargs):
"""Assert that a function call raises some exception. This is a
Deferred-friendly version of TestCase.assertRaises() .
Suppose you want to verify the following function:
def broken(a, b, c):
if a < 0:
raise TypeError('a must not be negative')
return defer.succeed(b+c)
You can use:
d = self.shouldFail(TypeError, 'test name',
'a must not be negative',
broken, -4, 5, c=12)
in your test method. The 'test name' string will be included in the
error message, if any, because Deferred chains frequently make it
difficult to tell which assertion was tripped.
The substring= argument, if not None, must appear in the 'repr'
of the message wrapped by this Failure, or the test will fail.
"""
assert substring is None or isinstance(substring, str)
d = defer.maybeDeferred(callable, *args, **kwargs)
def done(res):
if isinstance(res, failure.Failure):
res.trap(expected_failure)
if substring:
message = repr(res.value.args[0])
self.failUnless(substring in message,
"%s: substring '%s' not in '%s'"
% (which, substring, message))
else:
self.fail("%s was supposed to raise %s, not get '%s'" %
(which, expected_failure, res))
d.addBoth(done)
return d
class WebErrorMixin(object):
def explain_web_error(self, f):


@ -19,11 +19,13 @@ import time
import signal
from twisted.internet import defer, reactor
from twisted.application import service
from twisted.python import failure
from twisted.trial import unittest
from ..util.assertutil import precondition
from ..util.encodingutil import unicode_platform, get_filesystem_encoding
from ..util import log
class TimezoneMixin(object):
@ -77,6 +79,28 @@ class ShouldFailMixin(object):
def shouldFail(self, expected_failure, which, substring,
callable, *args, **kwargs):
"""Assert that a function call raises some exception. This is a
Deferred-friendly version of TestCase.assertRaises() .
Suppose you want to verify the following function:
def broken(a, b, c):
if a < 0:
raise TypeError('a must not be negative')
return defer.succeed(b+c)
You can use:
d = self.shouldFail(TypeError, 'test name',
'a must not be negative',
broken, -4, 5, c=12)
in your test method. The 'test name' string will be included in the
error message, if any, because Deferred chains frequently make it
difficult to tell which assertion was tripped.
The substring= argument, if not None, must appear in the 'repr'
of the message wrapped by this Failure, or the test will fail.
"""
assert substring is None or isinstance(substring, (bytes, unicode))
d = defer.maybeDeferred(callable, *args, **kwargs)
def done(res):
@ -135,3 +159,9 @@ class FakeCanary(object):
if self.ignore:
return
del self.disconnectors[marker]
class LoggingServiceParent(service.MultiService):
def log(self, *args, **kwargs):
return log.msg(*args, **kwargs)


@ -1,64 +1,7 @@
import re
import treq
from twisted.internet import defer
from twisted.web.error import Error
from nevow.testutil import FakeRequest
from nevow import inevow, context
class WebRenderingMixin(object):
# d=page.renderString() or s=page.renderSynchronously() will exercise
# docFactory, render_*/data_* . It won't exercise want_json(), or my
# renderHTTP() override which tests want_json(). To exercise args=, we
# must build a context. Pages which use a return_to= argument need a
# context.
# d=page.renderHTTP(ctx) will exercise my renderHTTP, want_json, and
# docFactory/render_*/data_*, but it requires building a context. Since
# we're already building a context, it is easy to exercise args= .
# so, use at least two d=page.renderHTTP(ctx) per page (one for json, one
# for html), then use lots of simple s=page.renderSynchronously() to
# exercise the fine details (the ones that don't require args=).
def make_context(self, req):
ctx = context.RequestContext(tag=req)
ctx.remember(req, inevow.IRequest)
ctx.remember(None, inevow.IData)
ctx = context.WovenContext(parent=ctx, precompile=False)
return ctx
def render1(self, page, **kwargs):
# use this to exercise an overridden renderHTTP, usually for
# output=json or render_GET. It always returns a Deferred.
req = FakeRequest(**kwargs)
req.fields = None
ctx = self.make_context(req)
d = defer.maybeDeferred(page.renderHTTP, ctx)
def _done(res):
if isinstance(res, str):
return res + req.v
return req.v
d.addCallback(_done)
return d
def render2(self, page, **kwargs):
# use this to exercise the normal Nevow docFactory rendering. It
# returns a string. If one of the render_* methods returns a
# Deferred, this will throw an exception. (note that
# page.renderString is the Deferred-returning equivalent)
req = FakeRequest(**kwargs)
req.fields = None
ctx = self.make_context(req)
return page.renderSynchronously(ctx)
def failUnlessIn(self, substring, s):
self.failUnless(substring in s, s)
def remove_tags(self, s):
s = re.sub(r'<[^>]*>', ' ', s)
s = re.sub(r'\s+', ' ', s)
return s
@defer.inlineCallbacks
def do_http(method, url, **kwargs):


@ -2,6 +2,8 @@
Tools aimed at the interaction between tests and Eliot.
"""
from past.builtins import unicode
__all__ = [
"RUN_TEST",
"EliotLoggedRunTest",


@ -1,8 +1,21 @@
"""
Tests for allmydata.codec.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os
from twisted.trial import unittest
from twisted.python import log
from allmydata.codec import CRSEncoder, CRSDecoder
from allmydata.codec import CRSEncoder, CRSDecoder, parse_params
import random
from allmydata.util import mathutil
@ -13,6 +26,8 @@ class T(unittest.TestCase):
enc.set_params(size, required_shares, max_shares)
params = enc.get_params()
assert params == (size, required_shares, max_shares)
serialized_params = enc.get_serialized_params()
self.assertEqual(parse_params(serialized_params), params)
log.msg("params: %s" % (params,))
d = enc.encode(data0s)
def _done_encoding_all(shares_and_shareids):
@ -23,7 +38,7 @@ class T(unittest.TestCase):
d.addCallback(_done_encoding_all)
if fewer_shares is not None:
# also validate that the desired_shareids= parameter works
desired_shareids = random.sample(range(max_shares), fewer_shares)
desired_shareids = random.sample(list(range(max_shares)), fewer_shares)
d.addCallback(lambda res: enc.encode(data0s, desired_shareids))
def _check_fewer_shares(some_shares_and_their_shareids):
(some_shares, their_shareids) = some_shares_and_their_shareids
@ -38,11 +53,11 @@ class T(unittest.TestCase):
return d1
def _check_data(decoded_shares):
self.failUnlessEqual(len(''.join(decoded_shares)), len(''.join(data0s)))
self.failUnlessEqual(len(b''.join(decoded_shares)), len(b''.join(data0s)))
self.failUnlessEqual(len(decoded_shares), len(data0s))
for (i, (x, y)) in enumerate(zip(data0s, decoded_shares)):
self.failUnlessEqual(x, y, "%s: %r != %r.... first share was %r" % (str(i), x, y, data0s[0],))
self.failUnless(''.join(decoded_shares) == ''.join(data0s), "%s" % ("???",))
self.failUnless(b''.join(decoded_shares) == b''.join(data0s), "%s" % ("???",))
# 0data0sclipped = tuple(data0s)
# data0sclipped[-1] =
# self.failUnless(tuple(decoded_shares) == tuple(data0s))
@ -59,7 +74,7 @@ class T(unittest.TestCase):
def _decode_some_random(res):
log.msg("_decode_some_random")
# use a randomly-selected minimal subset
l = random.sample(zip(self.shares, self.shareids), required_shares)
l = random.sample(list(zip(self.shares, self.shareids)), required_shares)
some_shares = [ x[0] for x in l ]
some_shareids = [ x[1] for x in l ]
return _decode((some_shares, some_shareids))
@ -70,10 +85,10 @@ class T(unittest.TestCase):
log.msg("_decode_multiple")
# make sure we can re-use the decoder object
shares1 = random.sample(self.shares, required_shares)
sharesl1 = random.sample(zip(self.shares, self.shareids), required_shares)
sharesl1 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
shares1 = [ x[0] for x in sharesl1 ]
shareids1 = [ x[1] for x in sharesl1 ]
sharesl2 = random.sample(zip(self.shares, self.shareids), required_shares)
sharesl2 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
shares2 = [ x[0] for x in sharesl2 ]
shareids2 = [ x[1] for x in sharesl2 ]
dec = CRSDecoder()

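The list(...) wrappers above are needed because zip() returns a lazy iterator on Python 3 and random.sample requires a sequence with a length (a bare zip object raises TypeError); the list(range(...)) change follows the same pattern for consistency. For example:

import random

shares = [b"sh0", b"sh1", b"sh2", b"sh3"]
shareids = [0, 1, 2, 3]

# random.sample(zip(shares, shareids), 2) raises TypeError on Python 3;
# materialize the iterator first:
pairs = random.sample(list(zip(shares, shareids)), 2)
some_shares = [x[0] for x in pairs]
some_shareids = [x[1] for x in pairs]
assert len(some_shares) == len(some_shareids) == 2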

@ -0,0 +1,52 @@
"""
Tests for allmydata.monitor.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from allmydata.monitor import Monitor, OperationCancelledError
class MonitorTests(unittest.TestCase):
"""Tests for the Monitor class."""
def test_cancellation(self):
"""The monitor can be cancelled."""
m = Monitor()
self.assertFalse(m.is_cancelled())
m.raise_if_cancelled()
m.cancel()
self.assertTrue(m.is_cancelled())
with self.assertRaises(OperationCancelledError):
m.raise_if_cancelled()
def test_status(self):
"""The monitor can have its status set."""
m = Monitor()
self.assertEqual(m.get_status(), None)
m.set_status("discombobulated")
self.assertEqual(m.get_status(), "discombobulated")
def test_finish(self):
"""The monitor can finish."""
m = Monitor()
self.assertFalse(m.is_finished())
d = m.when_done()
self.assertNoResult(d)
result = m.finish(300)
self.assertEqual(result, 300)
self.assertEqual(m.get_status(), 300)
self.assertTrue(m.is_finished())
d.addBoth(self.assertEqual, 300)
return d

File diff suppressed because it is too large.


@ -980,6 +980,8 @@ class CountingDataUploadable(upload.Data):
class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
timeout = 180
def test_connections(self):
self.basedir = "system/SystemTest/test_connections"
d = self.set_up_nodes()


@ -24,6 +24,7 @@ if PY2:
# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
"allmydata.codec",
"allmydata.crypto",
"allmydata.crypto.aes",
"allmydata.crypto.ed25519",
@ -32,6 +33,8 @@ PORTED_MODULES = [
"allmydata.crypto.util",
"allmydata.hashtree",
"allmydata.immutable.happiness_upload",
"allmydata.interfaces",
"allmydata.monitor",
"allmydata.storage.crawler",
"allmydata.storage.expirer",
"allmydata.test.common_py3",
@ -68,6 +71,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_abbreviate",
"allmydata.test.test_base32",
"allmydata.test.test_base62",
"allmydata.test.test_codec",
"allmydata.test.test_configutil",
"allmydata.test.test_connection_status",
"allmydata.test.test_crawler",
@ -81,12 +85,14 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_humanreadable",
"allmydata.test.test_iputil",
"allmydata.test.test_log",
"allmydata.test.test_monitor",
"allmydata.test.test_netstring",
"allmydata.test.test_observer",
"allmydata.test.test_pipeline",
"allmydata.test.test_python3",
"allmydata.test.test_spans",
"allmydata.test.test_statistics",
"allmydata.test.test_storage",
"allmydata.test.test_storage_web",
"allmydata.test.test_time_format",
"allmydata.test.test_uri",


@ -1,31 +0,0 @@
"""
Implement a work-around for <https://github.com/twisted/nevow/issues/106>.
"""
from __future__ import (
print_function,
unicode_literals,
absolute_import,
division,
)
from nevow import inevow
from twisted.internet import defer
def renderHTTP(self, ctx):
request = inevow.IRequest(ctx)
if self.real_prepath_len is not None:
request.postpath = request.prepath + request.postpath
request.prepath = request.postpath[:self.real_prepath_len]
del request.postpath[:self.real_prepath_len]
result = defer.maybeDeferred(self.original.render, request).addCallback(
self._handle_NOT_DONE_YET, request)
return result
def patch():
"""
Monkey-patch the proposed fix into place.
"""
from nevow.appserver import OldResourceAdapter
OldResourceAdapter.renderHTTP = renderHTTP


@ -54,11 +54,6 @@ from .logs import (
create_log_resources,
)
# Hotfix work-around https://github.com/twisted/nevow/issues/106
from . import _nevow_106
_nevow_106.patch()
del _nevow_106
SCHEME = b"tahoe-lafs"
class IToken(ICredentials):


@ -58,9 +58,7 @@ commands =
# version pinning we do limits the variability of this output
pip freeze
# The tahoe script isn't sufficiently ported for this to succeed on
# Python 3.x yet.
!py36: tahoe --version
tahoe --version
!coverage: trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}