Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-20 05:28:04 +00:00)
Merge remote-tracking branch 'origin/master' into 3396.storage-tests-python-3
Commit af4814ee7b
@@ -211,7 +211,8 @@ jobs:

     environment:
       <<: *UTF_8_ENVIRONMENT
-      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27-coverage"
+      # We don't do coverage since it makes PyPy far too slow:
+      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"


   c-locale:
@@ -68,6 +68,10 @@ export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
 export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
 export PIP_NO_INDEX="1"

+# Make output unbuffered, so progress reports from subunitv2-file get streamed
+# and notify CircleCI we're still alive.
+export PYTHONUNBUFFERED=1
+
 if [ "${ALLOWED_FAILURE}" = "yes" ]; then
     alternative="true"
 else
.gitignore (vendored, 1 addition)

@@ -9,6 +9,7 @@ venv*
 *~
 *.DS_Store
 .*.kate-swp
+*.bak

 /build/
 /support/
newsfragments/3374.minor (new empty file)
newsfragments/3401.minor (new empty file)
@@ -1,4 +1,16 @@
-# -*- test-case-name: allmydata.test.test_encode_share -*-
+"""
+CRS encoding and decoding.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 from zope.interface import implementer
 from twisted.internet import defer
@@ -9,7 +21,7 @@ import zfec

 @implementer(ICodecEncoder)
 class CRSEncoder(object):
-    ENCODER_TYPE = "crs"
+    ENCODER_TYPE = b"crs"

     def set_params(self, data_size, required_shares, max_shares):
         assert required_shares <= max_shares
@@ -27,7 +39,7 @@ class CRSEncoder(object):
         return (self.data_size, self.required_shares, self.max_shares)

     def get_serialized_params(self):
-        return "%d-%d-%d" % (self.data_size, self.required_shares,
+        return b"%d-%d-%d" % (self.data_size, self.required_shares,
                              self.max_shares)

     def get_block_size(self):
@@ -37,7 +49,7 @@ class CRSEncoder(object):
         precondition(desired_share_ids is None or len(desired_share_ids) <= self.max_shares, desired_share_ids, self.max_shares)

         if desired_share_ids is None:
-            desired_share_ids = range(self.max_shares)
+            desired_share_ids = list(range(self.max_shares))

         for inshare in inshares:
             assert len(inshare) == self.share_size, (len(inshare), self.share_size, self.data_size, self.required_shares)
@@ -71,5 +83,5 @@ class CRSDecoder(object):
         return defer.succeed(data)

 def parse_params(serializedparams):
-    pieces = serializedparams.split("-")
+    pieces = serializedparams.split(b"-")
     return int(pieces[0]), int(pieces[1]), int(pieces[2])
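Taken together, the codec changes keep share parameters as bytes end to end: get_serialized_params() now formats a b"%d-%d-%d" value and the module-level parse_params() splits on a bytes separator. A minimal round-trip sketch of that behaviour (standalone; serialize_params below is a hypothetical stand-in for the encoder method, not code from this diff):

    def serialize_params(data_size, required_shares, max_shares):
        # bytes %-formatting works on Python 2.7 and on Python 3.5+ (PEP 461)
        return b"%d-%d-%d" % (data_size, required_shares, max_shares)

    def parse_params(serializedparams):
        # split on a bytes separator, since the serialized form is now bytes
        pieces = serializedparams.split(b"-")
        return int(pieces[0]), int(pieces[1]), int(pieces[2])

    # int() accepts ASCII digits as bytes, so the round trip holds on both versions
    assert parse_params(serialize_params(4096, 3, 10)) == (4096, 3, 10)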
@@ -1,3 +1,19 @@
+"""
+Interfaces for Tahoe-LAFS.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Don't import object/str/dict/etc. types, so we don't break any
+    # interfaces. Not importing open() because it triggers bogus flake8 error.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, range, max, min  # noqa: F401
+
 from past.builtins import long

 from zope.interface import Interface, Attribute
@@ -58,7 +74,7 @@ class RIBucketReader(RemoteInterface):
     def read(offset=Offset, length=ReadSize):
         return ShareData

-    def advise_corrupt_share(reason=str):
+    def advise_corrupt_share(reason=bytes):
         """Clients who discover hash failures in shares that they have
         downloaded from me will use this method to inform me about the
         failures. I will record their concern so that my operator can
@@ -71,7 +87,7 @@ class RIBucketReader(RemoteInterface):
         """


-TestVector = ListOf(TupleOf(Offset, ReadSize, str, str))
+TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes))
 # elements are (offset, length, operator, specimen)
 # operator is one of "lt, le, eq, ne, ge, gt"
 # nop always passes and is used to fetch data while writing.
@@ -89,13 +105,13 @@ ReadData = ListOf(ShareData)


 class RIStorageServer(RemoteInterface):
-    __remote_name__ = "RIStorageServer.tahoe.allmydata.com"
+    __remote_name__ = b"RIStorageServer.tahoe.allmydata.com"

     def get_version():
         """
         Return a dictionary of version information.
         """
-        return DictOf(str, Any())
+        return DictOf(bytes, Any())

     def allocate_buckets(storage_index=StorageIndex,
                          renew_secret=LeaseRenewSecret,
@@ -277,8 +293,8 @@ class RIStorageServer(RemoteInterface):
         """
         return TupleOf(bool, DictOf(int, ReadData))

-    def advise_corrupt_share(share_type=str, storage_index=StorageIndex,
-                             shnum=int, reason=str):
+    def advise_corrupt_share(share_type=bytes, storage_index=StorageIndex,
+                             shnum=int, reason=bytes):
         """Clients who discover hash failures in shares that they have
         downloaded from me will use this method to inform me about the
         failures. I will record their concern so that my operator can
@@ -2859,7 +2875,7 @@ UploadResults = Any() #DictOf(str, str)


 class RIEncryptedUploadable(RemoteInterface):
-    __remote_name__ = "RIEncryptedUploadable.tahoe.allmydata.com"
+    __remote_name__ = b"RIEncryptedUploadable.tahoe.allmydata.com"

     def get_size():
         return Offset
@@ -2875,7 +2891,7 @@ class RIEncryptedUploadable(RemoteInterface):


 class RICHKUploadHelper(RemoteInterface):
-    __remote_name__ = "RIUploadHelper.tahoe.allmydata.com"
+    __remote_name__ = b"RIUploadHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2888,7 +2904,7 @@ class RICHKUploadHelper(RemoteInterface):


 class RIHelper(RemoteInterface):
-    __remote_name__ = "RIHelper.tahoe.allmydata.com"
+    __remote_name__ = b"RIHelper.tahoe.allmydata.com"

     def get_version():
         """
@@ -2915,7 +2931,7 @@ class RIHelper(RemoteInterface):


 class RIStatsProvider(RemoteInterface):
-    __remote_name__ = "RIStatsProvider.tahoe.allmydata.com"
+    __remote_name__ = b"RIStatsProvider.tahoe.allmydata.com"
     """
     Provides access to statistics and monitoring information.
     """
@@ -2932,7 +2948,7 @@ class RIStatsProvider(RemoteInterface):


 class RIStatsGatherer(RemoteInterface):
-    __remote_name__ = "RIStatsGatherer.tahoe.allmydata.com"
+    __remote_name__ = b"RIStatsGatherer.tahoe.allmydata.com"
     """
     Provides a monitoring service for centralised collection of stats
     """
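The interface-level effect of these hunks is that wire-facing values are declared as bytes: remote interface names gain an explicit b prefix (a no-op on Python 2, where the native string type is already bytes) and schema constraints such as TestVector now require bytes for the operator and specimen fields. A small illustrative sketch in plain Python, not foolscap, with names invented for the example:

    # One element of a test-and-write test vector, matching the updated schema
    # TestVector = ListOf(TupleOf(Offset, ReadSize, bytes, bytes)):
    # (offset, length, operator, specimen), operator one of lt/le/eq/ne/ge/gt.
    test_vector = [(0, 5, b"eq", b"hello")]

    # The b prefix keeps these values bytes on Python 3; on Python 2 b"eq" and
    # "eq" are the same object type, so existing callers are unaffected.
    assert all(isinstance(op, bytes) and isinstance(specimen, bytes)
               for (_, _, op, specimen) in test_vector)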
@@ -1,7 +1,21 @@
+"""
+Manage status of long-running operations.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from zope.interface import Interface, implementer
 from allmydata.util import observer


 class IMonitor(Interface):
     """I manage status, progress, and cancellation for long-running operations.

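The same compatibility preamble appears at the top of every module touched here (codec, interfaces, monitor, and the new tests): the __future__ imports give the module Python 3 semantics for its own syntax, and on Python 2 the builtins import rebinds names such as bytes, str and range to backported Python-3-style types from the future package. A small self-contained illustration of what the shim is for; the assertions below reflect Python 3 semantics, and with the shim in place they are intended to hold on Python 2 as well:

    from __future__ import absolute_import, division, print_function, unicode_literals

    from future.utils import PY2
    if PY2:
        # On Python 2 these names become the backported Python-3-style types.
        from builtins import bytes, str  # noqa: F401

    # Under Python 3 semantics, text and bytes are distinct and bytes index to ints.
    data = bytes(b"crs")
    assert not isinstance(data, str)
    assert isinstance(data[0], int)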
@@ -1,8 +1,21 @@
+"""
+Tests for allmydata.codec.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
 from twisted.trial import unittest
 from twisted.python import log
-from allmydata.codec import CRSEncoder, CRSDecoder
+from allmydata.codec import CRSEncoder, CRSDecoder, parse_params
 import random
 from allmydata.util import mathutil
@@ -13,6 +26,8 @@ class T(unittest.TestCase):
         enc.set_params(size, required_shares, max_shares)
         params = enc.get_params()
         assert params == (size, required_shares, max_shares)
+        serialized_params = enc.get_serialized_params()
+        self.assertEqual(parse_params(serialized_params), params)
         log.msg("params: %s" % (params,))
         d = enc.encode(data0s)
         def _done_encoding_all(shares_and_shareids):
@@ -23,7 +38,7 @@ class T(unittest.TestCase):
         d.addCallback(_done_encoding_all)
         if fewer_shares is not None:
             # also validate that the desired_shareids= parameter works
-            desired_shareids = random.sample(range(max_shares), fewer_shares)
+            desired_shareids = random.sample(list(range(max_shares)), fewer_shares)
             d.addCallback(lambda res: enc.encode(data0s, desired_shareids))
             def _check_fewer_shares(some_shares_and_their_shareids):
                 (some_shares, their_shareids) = some_shares_and_their_shareids
@@ -38,11 +53,11 @@ class T(unittest.TestCase):
             return d1

         def _check_data(decoded_shares):
-            self.failUnlessEqual(len(''.join(decoded_shares)), len(''.join(data0s)))
+            self.failUnlessEqual(len(b''.join(decoded_shares)), len(b''.join(data0s)))
             self.failUnlessEqual(len(decoded_shares), len(data0s))
             for (i, (x, y)) in enumerate(zip(data0s, decoded_shares)):
                 self.failUnlessEqual(x, y, "%s: %r != %r.... first share was %r" % (str(i), x, y, data0s[0],))
-            self.failUnless(''.join(decoded_shares) == ''.join(data0s), "%s" % ("???",))
+            self.failUnless(b''.join(decoded_shares) == b''.join(data0s), "%s" % ("???",))
             # 0data0sclipped = tuple(data0s)
             # data0sclipped[-1] =
             # self.failUnless(tuple(decoded_shares) == tuple(data0s))
@@ -59,7 +74,7 @@ class T(unittest.TestCase):
         def _decode_some_random(res):
             log.msg("_decode_some_random")
             # use a randomly-selected minimal subset
-            l = random.sample(zip(self.shares, self.shareids), required_shares)
+            l = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             some_shares = [ x[0] for x in l ]
             some_shareids = [ x[1] for x in l ]
             return _decode((some_shares, some_shareids))
@@ -70,10 +85,10 @@ class T(unittest.TestCase):
             log.msg("_decode_multiple")
             # make sure we can re-use the decoder object
             shares1 = random.sample(self.shares, required_shares)
-            sharesl1 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl1 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares1 = [ x[0] for x in sharesl1 ]
             shareids1 = [ x[1] for x in sharesl1 ]
-            sharesl2 = random.sample(zip(self.shares, self.shareids), required_shares)
+            sharesl2 = random.sample(list(zip(self.shares, self.shareids)), required_shares)
             shares2 = [ x[0] for x in sharesl2 ]
             shareids2 = [ x[1] for x in sharesl2 ]
             dec = CRSDecoder()
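The recurring random.sample(list(zip(...))) change is needed because zip() returns a lazy iterator on Python 3, and random.sample() requires a sequence it can index; wrapping the result in list() behaves identically on both interpreters. A minimal illustration (the share values below are made up for the example):

    import random

    shares = [b"sh0", b"sh1", b"sh2", b"sh3"]
    shareids = [0, 1, 2, 3]

    # random.sample(zip(shares, shareids), 2) raises TypeError on Python 3,
    # because a zip object is an iterator, not a sequence.
    subset = random.sample(list(zip(shares, shareids)), 2)
    some_shares = [pair[0] for pair in subset]
    some_shareids = [pair[1] for pair in subset]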
src/allmydata/test/test_monitor.py (new file, 52 lines)

@@ -0,0 +1,52 @@
+"""
+Tests for allmydata.monitor.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from twisted.trial import unittest
+
+from allmydata.monitor import Monitor, OperationCancelledError
+
+
+class MonitorTests(unittest.TestCase):
+    """Tests for the Monitor class."""
+
+    def test_cancellation(self):
+        """The monitor can be cancelled."""
+        m = Monitor()
+        self.assertFalse(m.is_cancelled())
+        m.raise_if_cancelled()
+        m.cancel()
+        self.assertTrue(m.is_cancelled())
+        with self.assertRaises(OperationCancelledError):
+            m.raise_if_cancelled()
+
+    def test_status(self):
+        """The monitor can have its status set."""
+        m = Monitor()
+        self.assertEqual(m.get_status(), None)
+        m.set_status("discombobulated")
+        self.assertEqual(m.get_status(), "discombobulated")
+
+    def test_finish(self):
+        """The monitor can finish."""
+        m = Monitor()
+        self.assertFalse(m.is_finished())
+        d = m.when_done()
+        self.assertNoResult(d)
+
+        result = m.finish(300)
+        self.assertEqual(result, 300)
+        self.assertEqual(m.get_status(), 300)
+        self.assertTrue(m.is_finished())
+
+        d.addBoth(self.assertEqual, 300)
+        return d
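These tests pin down the Monitor surface that ported code relies on: is_cancelled()/raise_if_cancelled()/cancel() for cooperative cancellation, set_status()/get_status() for progress, and finish()/when_done() for completion. A sketch of how a long-running operation might use that API (the loop below is illustrative only, not code from this commit):

    from allmydata.monitor import Monitor, OperationCancelledError

    def process_items(items, monitor):
        for i, item in enumerate(items):
            monitor.raise_if_cancelled()   # stop promptly once cancel() has been called
            monitor.set_status("processed %d of %d" % (i, len(items)))
            # ... one unit of real work on `item` would go here ...
        return monitor.finish("done")      # also fires the when_done() Deferred

    m = Monitor()
    m.cancel()
    try:
        process_items(["a", "b", "c"], m)
    except OperationCancelledError:
        pass  # the loop stopped at its first raise_if_cancelled() check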
@@ -24,6 +24,7 @@ if PY2:

 # Keep these sorted alphabetically, to reduce merge conflicts:
 PORTED_MODULES = [
+    "allmydata.codec",
     "allmydata.crypto",
     "allmydata.crypto.aes",
     "allmydata.crypto.ed25519",
@@ -32,6 +33,8 @@ PORTED_MODULES = [
     "allmydata.crypto.util",
     "allmydata.hashtree",
     "allmydata.immutable.happiness_upload",
+    "allmydata.interfaces",
+    "allmydata.monitor",
     "allmydata.storage.crawler",
     "allmydata.storage.expirer",
     "allmydata.test.common_py3",
@@ -68,6 +71,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
+    "allmydata.test.test_codec",
     "allmydata.test.test_configutil",
     "allmydata.test.test_connection_status",
     "allmydata.test.test_crawler",
@@ -81,6 +85,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_humanreadable",
     "allmydata.test.test_iputil",
     "allmydata.test.test_log",
+    "allmydata.test.test_monitor",
     "allmydata.test.test_netstring",
     "allmydata.test.test_observer",
     "allmydata.test.test_pipeline",