Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-20 19:49:06 +00:00)
Merge pull request #1423 from a-detiste/master
Some checks failed
CI / coverage (macos-14, 3.12) (push) Has been cancelled
CI / coverage (ubuntu-22.04, 3.12) (push) Has been cancelled
CI / coverage (ubuntu-22.04, pypy-3.9) (push) Has been cancelled
CI / coverage (windows-2022, 3.12) (push) Has been cancelled
CI / integration (false, macos-14, 3.11) (push) Has been cancelled
CI / integration (false, ubuntu-20.04, 3.11) (push) Has been cancelled
CI / integration (false, windows-2022, 3.11) (push) Has been cancelled
CI / integration (true, ubuntu-20.04, 3.12) (push) Has been cancelled
CI / packaging (macos-14, 3.9) (push) Has been cancelled
CI / packaging (ubuntu-22.04, 3.9) (push) Has been cancelled
CI / packaging (windows-2022, 3.9) (push) Has been cancelled
CI / finish-coverage-report (push) Has been cancelled
clean Python2 syntax
This commit: f45175569e
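Every hunk below follows one mechanical pattern: on Python 3 all classes are new-style, so the explicit `object` base class is redundant and can be dropped. A minimal sketch of the before/after (hypothetical `Example` class, not taken from this diff):

# Python 2 needed the explicit base to get a new-style class
class Example(object):
    pass

# Python 3: classes implicitly inherit from object
class Example:
    pass

assert Example.__mro__ == (Example, object)  # same MRO either way

The same cleanup applies unchanged to classes decorated with `@attr.s`, `@define`, or `@frozen`, since those decorators do not depend on the explicit base.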
@@ -69,7 +69,7 @@ import pytest_twisted


 @attr.s
-class FlogGatherer(object):
+class FlogGatherer:
     """
     Flog Gatherer process.
     """
@@ -148,7 +148,7 @@ def create_flog_gatherer(reactor, request, temp_dir, flog_binary):


 @attr.s
-class StorageServer(object):
+class StorageServer:
     """
     Represents a Tahoe Storage Server
     """
@@ -200,7 +200,7 @@ def create_storage_server(reactor, request, temp_dir, introducer, flog_gatherer,


 @attr.s
-class Client(object):
+class Client:
     """
     Represents a Tahoe client
     """
@@ -328,7 +328,7 @@ def create_client(reactor, request, temp_dir, introducer, flog_gatherer, name, w


 @attr.s
-class Introducer(object):
+class Introducer:
     """
     Reprsents a running introducer
     """
@@ -434,7 +434,7 @@ def create_introducer(reactor, request, temp_dir, flog_gatherer, port):


 @attr.s
-class Grid(object):
+class Grid:
     """
     Represents an entire Tahoe Grid setup

@@ -103,7 +103,7 @@ def _race(left, right):


 @attr.s
-class Left(object):
+class Left:
     value = attr.ib()

     @classmethod
@@ -112,7 +112,7 @@ class Left(object):


 @attr.s
-class Right(object):
+class Right:
     value = attr.ib()

     @classmethod
@@ -253,7 +253,7 @@ def _tahoe_runner_optional_coverage(proto, reactor, request, other_args):
     )


-class TahoeProcess(object):
+class TahoeProcess:
     """
     A running Tahoe process, with associated information.
     """
@@ -67,7 +67,7 @@ class GridTesterOptions(usage.Options):
 class CommandFailed(Exception):
     pass

-class GridTester(object):
+class GridTester:
     def __init__(self, config):
         self.config = config
         self.tahoe = config.tahoe
@@ -9,7 +9,7 @@ DAY=24*60*60
 MONTH=31*DAY
 YEAR=365*DAY

-class ReliabilityModel(object):
+class ReliabilityModel:
     """Generate a model of system-wide reliability, given several input
     parameters.
@@ -208,7 +208,7 @@ class ReliabilityModel(object):
         repair = matrix(new_repair_rows)
         return repair

-class ReliabilityReport(object):
+class ReliabilityReport:
     def __init__(self):
         self.samples = []
@@ -10,7 +10,7 @@ except ImportError:
     from nevow import inevow
 from zope.interface import implements

-class MyRequest(object):
+class MyRequest:
     implements(inevow.IRequest)
     pass
@@ -23,7 +23,7 @@ GET_SPANS_S='_received spans trace .get_spans()'
 ADD_R=re.compile('_received spans trace .add\(([0-9]*), len=([0-9]*)\)')
 INIT_S='_received spans trace = DataSpans'

-class B(object):
+class B:
     def __init__(self, inf):
         self.inf = inf
@@ -53,7 +53,7 @@ print("average file size:", abbreviate_space(avg_filesize))

 SERVER_CAPACITY = 10**12

-class Server(object):
+class Server:
     def __init__(self, nodeid, capacity):
         self.nodeid = nodeid
         self.used = 0
@@ -74,7 +74,7 @@ class Server(object):
         else:
             return "<%s %s>" % (self.__class__.__name__, self.nodeid)

-class Ring(object):
+class Ring:
     SHOW_MINMAX = False
     def __init__(self, numservers, seed, permute):
         self.servers = []
@@ -9,7 +9,7 @@ SERVER_CAPACITY = 10**12
 def cmp(a, b):
     return (a > b) - (a < b)

-class Server(object):
+class Server:
     def __init__(self):
         self.si = random.randrange(0, 2**31)
         self.used = 0
@@ -16,7 +16,7 @@ def sha(s):
 def randomid():
     return os.urandom(20)

-class Node(object):
+class Node:
     def __init__(self, nid, introducer, simulator):
         self.nid = nid
         self.introducer = introducer
@@ -111,7 +111,7 @@ class Node(object):
         self.introducer.delete(fileid)
         return True

-class Introducer(object):
+class Introducer:
     def __init__(self, simulator):
         self.living_files = {}
         self.utilization = 0 # total size of all active files
@@ -148,7 +148,7 @@ class Introducer(object):
         self.simulator.stamp_utilization(self.utilization)
         del self.living_files[fileid]

-class Simulator(object):
+class Simulator:
     NUM_NODES = 1000
     EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
     RATE_ADDFILE = 1.0 / 10
@@ -35,7 +35,7 @@ GiB=1024*MiB
 TiB=1024*GiB
 PiB=1024*TiB

-class Sizes(object):
+class Sizes:
     def __init__(self, mode, file_size, arity=2):
         MAX_SEGSIZE = 128*KiB
         self.mode = mode
@@ -11,7 +11,7 @@ def roundup(size, blocksize=4096):
     return blocksize * mathutil.div_ceil(size, blocksize)


-class BigFakeString(object):
+class BigFakeString:
     def __init__(self, length):
         self.length = length
         self.fp = 0
newsfragments/4149.minor | 0 (new, empty file)
@@ -20,7 +20,7 @@ class FileProhibited(Exception):
         self.reason = reason


-class Blacklist(object):
+class Blacklist:
     def __init__(self, blacklist_fn):
         self.blacklist_fn = blacklist_fn
         self.last_mtime = None
@@ -61,7 +61,7 @@ class Blacklist(object):


 @implementer(IFileNode)
-class ProhibitedNode(object):
+class ProhibitedNode:

     def __init__(self, wrapped_node, reason):
         assert IFilesystemNode.providedBy(wrapped_node), wrapped_node
@@ -7,7 +7,7 @@ from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
 from allmydata.util import base32

 @implementer(ICheckResults)
-class CheckResults(object):
+class CheckResults:

     def __init__(self, uri, storage_index,
                  healthy, recoverable, count_happiness,
@@ -153,7 +153,7 @@ class CheckResults(object):
         return self._servermap

 @implementer(ICheckAndRepairResults)
-class CheckAndRepairResults(object):
+class CheckAndRepairResults:

     def __init__(self, storage_index):
         self.storage_index = storage_index
@@ -175,7 +175,7 @@ class CheckAndRepairResults(object):
         return self.post_repair_results


-class DeepResultsBase(object):
+class DeepResultsBase:

     def __init__(self, root_storage_index):
         self.root_storage_index = root_storage_index
@@ -50,7 +50,7 @@ def grid_manager(ctx, config):
     signing key) and should be kept safe.
     """

-class Config(object):
+class Config:
     """
     Available to all sub-commands as Click's context.obj
     """
@@ -156,7 +156,7 @@ def _make_secret():
     return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"


-class SecretHolder(object):
+class SecretHolder:
     def __init__(self, lease_secret, convergence_secret):
         self._lease_secret = lease_secret
         self._convergence_secret = convergence_secret
@@ -170,7 +170,7 @@ class SecretHolder(object):
     def get_convergence_secret(self):
         return self._convergence_secret

-class KeyGenerator(object):
+class KeyGenerator:
     """I create RSA keys for mutable files. Each call to generate() returns a
     single keypair."""

@@ -314,7 +314,7 @@ def create_client_from_config(config, _client_factory=None, _introducer_factory=


 @attr.s
-class _StoragePlugins(object):
+class _StoragePlugins:
     """
     Functionality related to getting storage plugins set up and ready for use.

@@ -559,7 +559,7 @@ def _register_reference(key, config, tub, referenceable):

 @implementer(IAnnounceableStorageServer)
 @attr.s
-class AnnounceableStorageServer(object):
+class AnnounceableStorageServer:
     announcement = attr.ib()
     storage_server = attr.ib()

@@ -13,7 +13,7 @@ from allmydata.interfaces import ICodecEncoder, ICodecDecoder
 import zfec

 @implementer(ICodecEncoder)
-class CRSEncoder(object):
+class CRSEncoder:
     ENCODER_TYPE = b"crs"

     def set_params(self, data_size, required_shares, max_shares):
@@ -55,7 +55,7 @@ class CRSEncoder(object):


 @implementer(ICodecDecoder)
-class CRSDecoder(object):
+class CRSDecoder:

     def set_params(self, data_size, required_shares, max_shares):
         self.data_size = data_size
@@ -13,7 +13,7 @@ from allmydata.uri import LiteralFileURI
 from allmydata.uri import from_string
 from allmydata.util import mathutil

-class DeepStats(object):
+class DeepStats:
     """Deep stats object.

     Holds results of the deep-stats operation.
@@ -63,7 +63,7 @@ ADD_FILE = ActionType(
 )


-class _OnlyFiles(object):
+class _OnlyFiles:
     """Marker for replacement option of only replacing files."""

 ONLY_FILES = _OnlyFiles()
@@ -115,7 +115,7 @@ def update_metadata(metadata, new_metadata, now):
 # contents and end by repacking them. It might be better to apply them to
 # the unpacked contents.

-class Deleter(object):
+class Deleter:
     def __init__(self, node, namex, must_exist=True, must_be_directory=False, must_be_file=False):
         self.node = node
         self.name = normalize(namex)
@@ -143,7 +143,7 @@ class Deleter(object):
         return new_contents


-class MetadataSetter(object):
+class MetadataSetter:
     def __init__(self, node, namex, metadata, create_readonly_node=None):
         self.node = node
         self.name = normalize(namex)
@@ -168,7 +168,7 @@ class MetadataSetter(object):
         return new_contents


-class Adder(object):
+class Adder:
     def __init__(self, node, entries=None, overwrite=True, create_readonly_node=None):
         """
         :param overwrite: Either True (allow overwriting anything existing),
@@ -300,7 +300,7 @@ def _pack_normalized_children(children, writekey, deep_immutable=False):
     return b"".join(entries)

 @implementer(IDirectoryNode, ICheckable, IDeepCheckable)
-class DirectoryNode(object):
+class DirectoryNode:
     filenode_class = MutableFileNode

     def __init__(self, filenode, nodemaker, uploader):
@@ -873,7 +873,7 @@ class ManifestWalker(DeepStats):
         }


-class DeepChecker(object):
+class DeepChecker:
     def __init__(self, root, verify, repair, add_lease):
         root_si = root.get_storage_index()
         if root_si:
@@ -17,13 +17,13 @@ class NeedRootcapLookupScheme(Exception):
     mechanism to translate name+passwd pairs into a rootcap, either a file of
     name/passwd/rootcap tuples, or a server to do the translation."""

-class FTPAvatarID(object):
+class FTPAvatarID:
     def __init__(self, username, rootcap):
         self.username = username
         self.rootcap = rootcap

 @implementer(checkers.ICredentialsChecker)
-class AccountFileChecker(object):
+class AccountFileChecker:
     credentialInterfaces = (credentials.ISSHPrivateKey,)

     def __init__(self, client, accountfile):
@@ -963,7 +963,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         return d


-class StoppableList(object):
+class StoppableList:
     def __init__(self, items):
         self.items = items
     def __iter__(self):
@@ -973,7 +973,7 @@ class StoppableList(object):
         pass


-class Reason(object):
+class Reason:
     def __init__(self, value):
         self.value = value

@@ -1904,7 +1904,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):


 @implementer(ITransport)
-class FakeTransport(object):
+class FakeTransport:
     def write(self, data):
         logmsg("FakeTransport.write(<data of length %r>)" % (len(data),), level=NOISY)

@@ -1979,7 +1979,7 @@ components.registerAdapter(ShellSession, SFTPUserHandler, ISession)
 from allmydata.frontends.auth import AccountFileChecker, NeedRootcapLookupScheme

 @implementer(portal.IRealm)
-class Dispatcher(object):
+class Dispatcher:
     def __init__(self, client):
         self._client = client

@@ -32,7 +32,7 @@ from attrs import (


 @frozen
-class SignedCertificate(object):
+class SignedCertificate:
     """
     A signed certificate.
     """
@@ -62,7 +62,7 @@ class SignedCertificate(object):


 @frozen
-class _GridManagerStorageServer(object):
+class _GridManagerStorageServer:
     """
     A Grid Manager's notion of a storage server
     """
@@ -93,7 +93,7 @@ class _GridManagerStorageServer(object):


 @frozen
-class _GridManagerCertificate(object):
+class _GridManagerCertificate:
     """
     Represents a single certificate for a single storage-server
     """
@@ -224,7 +224,7 @@ def load_grid_manager(config_path: Optional[FilePath]):
     return _GridManager(private_key_bytes, storage_servers)


-class _GridManager(object):
+class _GridManager:
     """
     A Grid Manager's configuration.
     """
@@ -69,7 +69,7 @@ def roundup_pow2(x):
     return ans


-class CompleteBinaryTreeMixin(object):
+class CompleteBinaryTreeMixin:
     """
     Adds convenience methods to a complete binary tree.

@@ -3,7 +3,7 @@

 import weakref

-class History(object):
+class History:
     """Keep track of recent operations, for a status display."""

     name = "history"
@@ -32,7 +32,7 @@ class UnsupportedErasureCodec(BadURIExtension):
     pass

 @implementer(IValidatedThingProxy)
-class ValidatedExtendedURIProxy(object):
+class ValidatedExtendedURIProxy:
     """ I am a front-end for a remote UEB (using a local ReadBucketProxy),
     responsible for retrieving and validating the elements from the UEB."""

@@ -10,7 +10,7 @@ from allmydata.util.dictutil import DictOfSets
 from .common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
      BadSegmentNumberError

-class SegmentFetcher(object):
+class SegmentFetcher:
     """I am responsible for acquiring blocks for a single segment. I will use
     the Share instances passed to my add_shares() method to locate, retrieve,
     and validate those blocks. I expect my parent node to call my
@@ -25,11 +25,11 @@ def incidentally(res, f, *args, **kwargs):
     f(*args, **kwargs)
     return res

-class RequestToken(object):
+class RequestToken:
     def __init__(self, server):
         self.server = server

-class ShareFinder(object):
+class ShareFinder:
     OVERDUE_TIMEOUT = 10.0

     def __init__(self, storage_broker, verifycap, node, download_status,
@@ -26,7 +26,7 @@ class IDownloadStatusHandlingConsumer(Interface):
         """Record the DownloadStatus 'read event', to be updated with the
         time it takes to decrypt each chunk of data."""

-class Cancel(object):
+class Cancel:
     def __init__(self, f):
         self._f = f
         self.active = True
@@ -37,7 +37,7 @@ class Cancel(object):
         self._f(self)


-class DownloadNode(object):
+class DownloadNode:
     """Internal class which manages downloads and holds state. External
     callers use CiphertextFileNode instead."""

@@ -15,7 +15,7 @@ from allmydata.interfaces import DownloadStopped
 from .common import BadSegmentNumberError, WrongSegmentError

 @implementer(IPushProducer)
-class Segmentation(object):
+class Segmentation:
     """I am responsible for a single offset+size read of the file. I handle
     segmentation: I figure out which segments are necessary, request them
     (from my CiphertextDownloader) in order, and trim the segments down to
@@ -27,7 +27,7 @@ class DataUnavailable(Exception):
     pass


-class Share(object):
+class Share:
     """I represent a single instance of a single share (e.g. I reference the
     shnum2 for share SI=abcde on server xy12t, not the one on server ab45q).
     I am associated with a CommonShare that remembers data that is held in
@@ -830,7 +830,7 @@ class Share(object):
             o.notify(state=DEAD, f=f)


-class CommonShare(object):
+class CommonShare:
     # TODO: defer creation of the hashtree until somebody uses us. There will
     # be a lot of unused shares, and we shouldn't spend the memory on a large
     # hashtree unless necessary.
@@ -6,7 +6,7 @@ import itertools
 from zope.interface import implementer
 from allmydata.interfaces import IDownloadStatus

-class ReadEvent(object):
+class ReadEvent:

     def __init__(self, ev, ds):
         self._ev = ev
@@ -22,7 +22,7 @@ class ReadEvent(object):
         self._ds.update_last_timestamp(finishtime)


-class SegmentEvent(object):
+class SegmentEvent:

     def __init__(self, ev, ds):
         self._ev = ev
@@ -47,7 +47,7 @@ class SegmentEvent(object):
         self._ds.update_last_timestamp(when)


-class DYHBEvent(object):
+class DYHBEvent:

     def __init__(self, ev, ds):
         self._ev = ev
@@ -65,7 +65,7 @@ class DYHBEvent(object):
         self._ds.update_last_timestamp(when)


-class BlockRequestEvent(object):
+class BlockRequestEvent:

     def __init__(self, ev, ds):
         self._ev = ev
@@ -84,7 +84,7 @@ class BlockRequestEvent(object):


 @implementer(IDownloadStatus)
-class DownloadStatus(object):
+class DownloadStatus:
     # There is one DownloadStatus for each CiphertextFileNode. The status
     # object will keep track of all activity for that node.
     statusid_counter = itertools.count(0)
@@ -79,7 +79,7 @@ TiB=1024*GiB
 PiB=1024*TiB

 @implementer(IEncoder)
-class Encoder(object):
+class Encoder:

     def __init__(self, log_parent=None, upload_status=None):
         object.__init__(self)
@@ -25,7 +25,7 @@ from allmydata.immutable.downloader.node import DownloadNode, \
      IDownloadStatusHandlingConsumer
 from allmydata.immutable.downloader.status import DownloadStatus

-class CiphertextFileNode(object):
+class CiphertextFileNode:
     def __init__(self, verifycap, storage_broker, secret_holder,
                  terminator, history):
         assert isinstance(verifycap, uri.CHKFileVerifierURI)
@@ -186,7 +186,7 @@ class CiphertextFileNode(object):
         return v.start()

 @implementer(IConsumer, IDownloadStatusHandlingConsumer)
-class DecryptingConsumer(object):
+class DecryptingConsumer:
     """I sit between a CiphertextDownloader (which acts as a Producer) and
     the real Consumer, decrypting everything that passes by. The real
     Consumer sees the real Producer, but the Producer sees us instead of the
@@ -232,7 +232,7 @@ class DecryptingConsumer(object):
         self._consumer.write(plaintext)

 @implementer(IImmutableFileNode)
-class ImmutableFileNode(object):
+class ImmutableFileNode:

     # I wrap a CiphertextFileNode with a decryption key
     def __init__(self, filecap, storage_broker, secret_holder, terminator,
@@ -141,7 +141,7 @@ class _WriteBuffer:


 @implementer(IStorageBucketWriter)
-class WriteBucketProxy(object):
+class WriteBucketProxy:
     """
     Note: The various ``put_`` methods need to be called in the order in which the
     bytes will get written.
@@ -364,7 +364,7 @@ class WriteBucketProxy_v2(WriteBucketProxy):
         self._offset_data = offset_data

 @implementer(IStorageBucketReader)
-class ReadBucketProxy(object):
+class ReadBucketProxy:

     def __init__(self, rref, server, storage_index):
         self._rref = rref
@@ -11,7 +11,7 @@ from allmydata.interfaces import IImmutableFileNode, ICheckable
 from allmydata.uri import LiteralFileURI


-class _ImmutableFileNodeBase(object):
+class _ImmutableFileNodeBase:

     def get_write_uri(self):
         return None
@@ -19,7 +19,7 @@ class NotEnoughWritersError(Exception):
     pass


-class CHKCheckerAndUEBFetcher(object):
+class CHKCheckerAndUEBFetcher:
     """I check to see if a file is already present in the grid. I also fetch
     the URI Extension Block, which is useful for an uploading client who
     wants to avoid the work of encryption and encoding.
@@ -254,7 +254,7 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warn
         self._helper.upload_finished(self._storage_index, 0)
         del self._reader

-class AskUntilSuccessMixin(object):
+class AskUntilSuccessMixin:
     # create me with a _reader array
     _last_failure = None

@@ -183,7 +183,7 @@ class HelperUploadResults(Copyable, RemoteCopy):
         self.pushed_shares = None # count of shares we pushed

 @implementer(IUploadResults)
-class UploadResults(object):
+class UploadResults:

     def __init__(self, file_size,
                  ciphertext_fetched, # how much the helper fetched
@@ -235,7 +235,7 @@ def pretty_print_shnum_to_servers(s):
     return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.items() ])


-class ServerTracker(object):
+class ServerTracker:
     def __init__(self, server,
                  sharesize, blocksize, num_segments, num_share_hashes,
                  storage_index,
@@ -325,7 +325,7 @@ def str_shareloc(shnum, bucketwriter):


 @implementer(IPeerSelector)
-class PeerSelector(object):
+class PeerSelector:

     def __init__(self, num_segments, total_shares, needed_shares, min_happiness):
         self.num_segments = num_segments
@@ -384,7 +384,7 @@ class PeerSelector(object):
         raise NotImplementedError


-class _QueryStatistics(object):
+class _QueryStatistics:

     def __init__(self):
         self.total = 0
@@ -896,7 +896,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):


 @attr.s
-class _Accum(object):
+class _Accum:
     """
     Accumulate up to some known amount of ciphertext.

@@ -924,7 +924,7 @@ class _Accum(object):


 @implementer(IEncryptedUploadable)
-class EncryptAnUploadable(object):
+class EncryptAnUploadable:
     """This is a wrapper that takes an IUploadable and provides
     IEncryptedUploadable."""
     CHUNKSIZE = 50*1024
@@ -1159,7 +1159,7 @@ class EncryptAnUploadable(object):
         return self.original.close()

 @implementer(IUploadStatus)
-class UploadStatus(object):
+class UploadStatus:
     statusid_counter = itertools.count(0)

     def __init__(self):
@@ -1208,7 +1208,7 @@ class UploadStatus(object):
     def set_results(self, value):
         self.results = value

-class CHKUploader(object):
+class CHKUploader:

     def __init__(self, storage_broker, secret_holder, reactor=None):
         # server_selector needs storage_broker and secret_holder
@@ -1408,7 +1408,7 @@ def read_this_many_bytes(uploadable, size, prepend_data=None):
     d.addCallback(_got)
     return d

-class LiteralUploader(object):
+class LiteralUploader:

     def __init__(self):
         self._status = s = UploadStatus()
@@ -1525,7 +1525,7 @@ class RemoteEncryptedUploadable(Referenceable): # type: ignore # warner/foolsca
         return self._eu.close()


-class AssistedUploader(object):
+class AssistedUploader:

     def __init__(self, helper, storage_broker):
         self._helper = helper
@@ -1680,7 +1680,7 @@ class AssistedUploader(object):
     def get_upload_status(self):
         return self._upload_status

-class BaseUploadable(object):
+class BaseUploadable:
     # this is overridden by max_segment_size
     default_max_segment_size = DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
     default_params_set = False
@@ -64,7 +64,7 @@ def unsign_from_foolscap(ann_t):
     return (ann, key_vs)


-class SubscriberDescriptor(object):
+class SubscriberDescriptor:
     """This describes a subscriber, for status display purposes. It contains
     the following attributes:

@@ -88,7 +88,7 @@ class SubscriberDescriptor(object):
         self.remote_address = remote_address
         self.tubid = tubid

-class AnnouncementDescriptor(object):
+class AnnouncementDescriptor:
     """This describes an announcement, for status display purposes. It
     contains the following attributes, which will be empty ("" for
     strings) if the client did not provide them:
@@ -87,7 +87,7 @@ class OperationCancelledError(Exception):


 @implementer(IMonitor)
-class Monitor(object):
+class Monitor:

     def __init__(self):
         self.cancelled = False
@@ -13,7 +13,7 @@ from allmydata.mutable.common import MODE_CHECK, MODE_WRITE, CorruptShareError
 from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 from allmydata.mutable.retrieve import Retrieve # for verifying

-class MutableChecker(object):
+class MutableChecker:
     SERVERMAP_MODE = MODE_CHECK

     def __init__(self, node, storage_broker, history, monitor):
@@ -34,7 +34,7 @@ from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
 from allmydata.mutable.repairer import Repairer


-class BackoffAgent(object):
+class BackoffAgent:
     # these parameters are copied from foolscap.reconnector, which gets them
     # from twisted.internet.protocol.ReconnectingClientFactory
     initialDelay = 1.0
@@ -59,7 +59,7 @@ class BackoffAgent(object):
 # use nodemaker.create_mutable_file() to make one of these

 @implementer(IMutableFileNode, ICheckable)
-class MutableFileNode(object):
+class MutableFileNode:

     def __init__(self, storage_broker, secret_holder,
                  default_encoding_parameters, history):
@@ -698,7 +698,7 @@ class MutableFileNode(object):


 @implementer(IMutableFileVersion, IWriteable)
-class MutableFileVersion(object):
+class MutableFileVersion:
     """
     I represent a specific version (most likely the best version) of a
     mutable file.
@@ -222,7 +222,7 @@ def pack_prefix(seqnum, root_hash, IV,


 @implementer(IMutableSlotWriter)
-class SDMFSlotWriteProxy(object):
+class SDMFSlotWriteProxy:
     """
     I represent a remote write slot for an SDMF mutable file. I build a
     share in memory, and then write it in one piece to the remote
@@ -571,7 +571,7 @@ VERIFICATION_KEY_SIZE = 292
 SHARE_HASH_CHAIN_SIZE = (2+HASH_SIZE)*mathutil.log_ceil(256, 2)

 @implementer(IMutableSlotWriter)
-class MDMFSlotWriteProxy(object):
+class MDMFSlotWriteProxy:

     """
     I represent a remote write slot for an MDMF mutable file.
@@ -1187,7 +1187,7 @@ def _handle_bad_struct(f):
     f.trap(struct.error)
     raise BadShareError(f.value.args[0])

-class MDMFSlotReadProxy(object):
+class MDMFSlotReadProxy:
     """
     I read from a mutable slot filled with data written in the MDMF data
     format (which is described above).
@@ -41,7 +41,7 @@ PUSHING_EVERYTHING_ELSE_STATE = 1
 DONE_STATE = 2

 @implementer(IPublishStatus)
-class PublishStatus(object):
+class PublishStatus:
     statusid_counter = count(0)
     def __init__(self):
         self.timings = {}
@@ -112,7 +112,7 @@ class PublishStatus(object):
 class LoopLimitExceededError(Exception):
     pass

-class Publish(object):
+class Publish:
     """I represent a single act of publishing the mutable file to the grid. I
     will only publish my data if the servermap I am using still represents
     the current state of the world.
@@ -1229,7 +1229,7 @@ class Publish(object):


 @implementer(IMutableUploadable)
-class MutableFileHandle(object):
+class MutableFileHandle:
     """
     I am a mutable uploadable built around a filehandle-like object,
     usually either a BytesIO instance or a handle to an actual file.
@@ -1313,7 +1313,7 @@ class MutableData(MutableFileHandle):


 @implementer(IMutableUploadable)
-class TransformingUploadable(object):
+class TransformingUploadable:
     """
     I am an IMutableUploadable that wraps another IMutableUploadable,
     and some segments that are already on the grid. When I am called to
@@ -10,7 +10,7 @@ from allmydata.mutable.common import MODE_REPAIR
 from allmydata.mutable.servermap import ServerMap, ServermapUpdater

 @implementer(IRepairResults)
-class RepairResults(object):
+class RepairResults:

     def __init__(self, smap):
         self.servermap = smap
@@ -27,7 +27,7 @@ class RepairRequiresWritecapError(Exception):
 class MustForceRepairError(Exception):
     pass

-class Repairer(object):
+class Repairer:
     def __init__(self, node, check_results, storage_broker, history, monitor):
         self.node = node
         self.check_results = ICheckResults(check_results)
@@ -29,7 +29,7 @@ from allmydata.mutable.common import CorruptShareError, BadShareError, \
 from allmydata.mutable.layout import MDMFSlotReadProxy

 @implementer(IRetrieveStatus)
-class RetrieveStatus(object):
+class RetrieveStatus:
     statusid_counter = count(0)
     def __init__(self):
         self.timings = {}
@@ -95,11 +95,11 @@ class RetrieveStatus(object):
         serverid = server.get_serverid()
         self._problems[serverid] = f

-class Marker(object):
+class Marker:
     pass

 @implementer(IPushProducer)
-class Retrieve(object):
+class Retrieve:
     # this class is currently single-use. Eventually (in MDMF) we will make
     # it multi-use, in which case you can call download(range) multiple
     # times, and each will have a separate response chain. However the
@@ -25,7 +25,7 @@ from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, \
 from allmydata.mutable.layout import SIGNED_PREFIX_LENGTH, MDMFSlotReadProxy

 @implementer(IServermapUpdaterStatus)
-class UpdateStatus(object):
+class UpdateStatus:
     statusid_counter = count(0)
     def __init__(self):
         self.timings = {}
@@ -86,7 +86,7 @@ class UpdateStatus(object):
     def set_finished(self, when):
         self.finished = when

-class ServerMap(object):
+class ServerMap:
     """I record the placement of mutable shares.

     This object records which shares (of various versions) are located on
@@ -385,7 +385,7 @@ class ServerMap(object):
         self.update_data.setdefault(shnum , []).append((verinfo, data))


-class ServermapUpdater(object):
+class ServermapUpdater:
     def __init__(self, filenode, storage_broker, monitor, servermap,
                  mode=MODE_READ, add_lease=False, update_range=None):
         """I update a servermap, locating a sufficient number of useful
@@ -121,7 +121,7 @@ such as private keys. On Unix-like systems, the permissions on this directory
 are set to disallow users other than its owner from reading the contents of
 the files. See the 'configuration.rst' documentation file for details."""

-class _None(object):
+class _None:
     """
     This class is to be used as a marker in get_config()
     """
@@ -276,7 +276,7 @@ def ensure_text_and_abspath_expanduser_unicode(basedir: Union[bytes, str]) -> st


 @attr.s
-class _Config(object):
+class _Config:
     """
     Manages configuration of a Tahoe 'node directory'.

@@ -22,7 +22,7 @@ from allmydata import uri


 @implementer(INodeMaker)
-class NodeMaker(object):
+class NodeMaker:

     def __init__(self, storage_broker, secret_holder, history,
                  uploader, terminator,
@@ -80,7 +80,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         return None


-class FileResult(object):
+class FileResult:
     def __init__(self, bdb, filecap, should_check,
                  path, mtime, ctime, size):
         self.bdb = bdb
@@ -108,7 +108,7 @@ class FileResult(object):
         self.bdb.did_check_file_healthy(self.filecap, results)


-class DirectoryResult(object):
+class DirectoryResult:
     def __init__(self, bdb, dirhash, dircap, should_check):
         self.bdb = bdb
         self.dircap = dircap
@@ -130,7 +130,7 @@ class DirectoryResult(object):
         self.bdb.did_check_directory_healthy(self.dircap, results)


-class BackupDB_v2(object):
+class BackupDB_v2:
     VERSION = 2
     NO_CHECK_BEFORE = 1*MONTH
     ALWAYS_CHECK_AFTER = 2*MONTH
@@ -185,7 +185,7 @@ def get_aliases(nodedir):
         pass
     return aliases

-class DefaultAliasMarker(object):
+class DefaultAliasMarker:
     pass

 pretend_platform_uses_lettercolon = False # for tests
@@ -31,7 +31,7 @@ def parse_url(url, defaultPort=None):
         path = "/"
     return scheme, host, port, path

-class BadResponse(object):
+class BadResponse:
     def __init__(self, url, err):
         self.status = -1
         self.reason = "Error trying to connect to %s: %s" % (url, err)
@@ -13,7 +13,7 @@ from allmydata.util.encodingutil import quote_output, is_printable_ascii
 from urllib.parse import quote as url_quote
 import json

-class SlowOperationRunner(object):
+class SlowOperationRunner:

     def run(self, options):
         stderr = options.stderr
@@ -58,7 +58,7 @@ def put_child(dirurl, childname, childcap):
     if resp.status not in (200, 201):
         raise HTTPError("Error during put_child", resp)

-class BackerUpper(object):
+class BackerUpper:
     """
     :ivar int _files_checked: The number of files which the backup process has
         so-far inspected on the grid to determine if they need to be
@@ -332,7 +332,7 @@ def run_backup(
     return progress.backup_finished()


-class FileTarget(object):
+class FileTarget:
     def __init__(self, path):
         self._path = path

@@ -352,7 +352,7 @@ class FileTarget(object):
         return progress.reused_file(self._path, childcap, metadata)


-class DirectoryTarget(object):
+class DirectoryTarget:
     def __init__(self, path):
         self._path = path

@@ -368,7 +368,7 @@ class DirectoryTarget(object):
         return progress.reused_directory(self._path, dircap, metadata)


-class _ErrorTarget(object):
+class _ErrorTarget:
     def __init__(self, path, isdir=False):
         self._path = path
         self._quoted_path = quote_local_unicode_path(path)
@@ -403,7 +403,7 @@ class SpecialTarget(_ErrorTarget):
         )


-class BackupComplete(object):
+class BackupComplete:
     def __init__(self,
                  start_timestamp,
                  end_timestamp,
@@ -462,7 +462,7 @@ class BackupComplete(object):
         return "\n".join(result)


-class BackupProgress(object):
+class BackupProgress:
     # Would be nice if this data structure were immutable and its methods were
     # transformations that created a new slightly different object. Not there
     # yet, though.
@@ -14,7 +14,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path, get_io_encoding

-class Checker(object):
+class Checker:
     pass

 def _quote_serverid_index_share(serverid, storage_index, sharenum):
@@ -119,7 +119,7 @@ def check(options):
             return errno
     return 0

-class FakeTransport(object):
+class FakeTransport:
     disconnecting = False

 class DeepCheckOutput(LineOnlyReceiver, object):
@@ -73,7 +73,7 @@ def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
         raise HTTPError("Error during mkdir", resp)


-class LocalFileSource(object):
+class LocalFileSource:
     def __init__(self, pathname, basename):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -88,7 +88,7 @@ class LocalFileSource(object):
     def open(self, caps_only):
         return open(self.pathname, "rb")

-class LocalFileTarget(object):
+class LocalFileTarget:
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -96,7 +96,7 @@ class LocalFileTarget(object):
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)

-class LocalMissingTarget(object):
+class LocalMissingTarget:
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -104,7 +104,7 @@ class LocalMissingTarget(object):
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)

-class LocalDirectorySource(object):
+class LocalDirectorySource:
     def __init__(self, progressfunc, pathname, basename):
         precondition_abspath(pathname)

@@ -136,7 +136,7 @@ class LocalDirectorySource(object):
             # TODO: output a warning
             pass

-class LocalDirectoryTarget(object):
+class LocalDirectoryTarget:
     def __init__(self, progressfunc, pathname):
         precondition_abspath(pathname)

@@ -183,7 +183,7 @@ class LocalDirectoryTarget(object):
             pass


-class TahoeFileSource(object):
+class TahoeFileSource:
     def __init__(self, nodeurl, mutable, writecap, readcap, basename):
         self.nodeurl = nodeurl
         self.mutable = mutable
@@ -216,7 +216,7 @@ def seekable(file_like):
     )


-class TahoeFileTarget(object):
+class TahoeFileTarget:
     def __init__(self, nodeurl, mutable, writecap, readcap, url):
         self.nodeurl = nodeurl
         self.mutable = mutable
@@ -236,7 +236,7 @@ class TahoeFileTarget(object):
     # to always create mutable files, or to copy mutable files into new
     # mutable files. ticket #835

-class TahoeDirectorySource(object):
+class TahoeDirectorySource:
     def __init__(self, nodeurl, cache, progressfunc, basename):
         self.nodeurl = nodeurl
         self.cache = cache
@@ -309,7 +309,7 @@ class TahoeDirectorySource(object):
                 "You probably need to use a later version of "
                 "Tahoe-LAFS to copy this directory.")

-class TahoeMissingTarget(object):
+class TahoeMissingTarget:
     def __init__(self, url):
         self.url = url

@@ -326,7 +326,7 @@ class TahoeMissingTarget(object):
         # I'm not sure this will always work
         return PUT(self.url + "?t=uri", filecap)

-class TahoeDirectoryTarget(object):
+class TahoeDirectoryTarget:
     def __init__(self, nodeurl, cache, progressfunc):
         self.nodeurl = nodeurl
         self.cache = cache
@@ -470,7 +470,7 @@ FileTargets = (LocalFileTarget, TahoeFileTarget)
 DirectoryTargets = (LocalDirectoryTarget, TahoeDirectoryTarget)
 MissingTargets = (LocalMissingTarget, TahoeMissingTarget)

-class Copier(object):
+class Copier:

     def do_copy(self, options, progressfunc=None):
         if options['quiet']:
@@ -12,7 +12,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path

-class FakeTransport(object):
+class FakeTransport:
     disconnecting = False

 class ManifestStreamer(LineOnlyReceiver, object):
@@ -225,7 +225,7 @@ class DaemonizeTheRealService(Service, HookMixin):
         reactor.callWhenRunning(start)


-class DaemonizeTahoeNodePlugin(object):
+class DaemonizeTahoeNodePlugin:
     tapname = "tahoenode"
     def __init__(self, nodetype, basedir, allow_stdin_close):
         self.nodetype = nodetype
@@ -150,7 +150,7 @@ def _dump_json_to_file(js, afile):
         f.write(data.encode("utf8"))


-class _LeaseStateSerializer(object):
+class _LeaseStateSerializer:
     """
     Read and write state for LeaseCheckingCrawler. This understands
     how to read the legacy pickle format files and upgrade them to the
@@ -30,7 +30,7 @@ def _convert_pickle_state_to_json(state):
     }


-class _HistorySerializer(object):
+class _HistorySerializer:
     """
     Serialize the 'history' file of the lease-crawler state. This is
     "storage/lease_checker.history" for the pickle or
@@ -215,7 +215,7 @@ def limited_content(


 @define
-class ImmutableCreateResult(object):
+class ImmutableCreateResult:
     """Result of creating a storage index for an immutable."""

     already_have: set[int]
@@ -422,7 +422,7 @@ class StorageClientFactory:


 @define(hash=True)
-class StorageClient(object):
+class StorageClient:
     """
     Low-level HTTP client that talks to the HTTP storage server.

@@ -580,7 +580,7 @@ class StorageClient(object):


 @define(hash=True)
-class StorageClientGeneral(object):
+class StorageClientGeneral:
     """
     High-level HTTP APIs that aren't immutable- or mutable-specific.
     """
@@ -659,7 +659,7 @@ class StorageClientGeneral(object):


 @define
-class UploadProgress(object):
+class UploadProgress:
     """
     Progress of immutable upload, per the server.
     """
@@ -780,7 +780,7 @@ async def advise_corrupt_share(


 @define(hash=True)
-class StorageClientImmutables(object):
+class StorageClientImmutables:
     """
     APIs for interacting with immutables.
     """
@@ -251,7 +251,7 @@ def _authorized_route(


 @define
-class StorageIndexUploads(object):
+class StorageIndexUploads:
     """
     In-progress upload to storage index.
     """
@@ -265,7 +265,7 @@ class StorageIndexUploads(object):


 @define
-class UploadsInProgress(object):
+class UploadsInProgress:
     """
     Keep track of uploads for storage indexes.
     """
@@ -1072,7 +1072,7 @@ class HTTPServer(BaseApp):

 @implementer(IStreamServerEndpoint)
 @define
-class _TLSEndpointWrapper(object):
+class _TLSEndpointWrapper:
     """
     Wrap an existing endpoint with the server-side storage TLS policy. This is
     useful because not all Tahoe-LAFS endpoints might be plain TCP+TLS, for
@@ -103,7 +103,7 @@ def _fix_lease_count_format(lease_count_format):
     return fixed


-class ShareFile(object):
+class ShareFile:
     """
     Support interaction with persistent storage of a share.

@@ -351,7 +351,7 @@ class ShareFile(object):
         return space_freed


-class BucketWriter(object):
+class BucketWriter:
     """
     Keep track of the process of writing to a ShareFile.
     """
@@ -518,7 +518,7 @@ class FoolscapBucketWriter(Referenceable): # type: ignore # warner/foolscap#78
         return self._bucket_writer.abort()


-class BucketReader(object):
+class BucketReader:
     """
     Manage the process for reading from a ``ShareFile``.
     """
@@ -12,7 +12,7 @@ from .lease_schema import (
 )

 @attr.s(frozen=True)
-class _Schema(object):
+class _Schema:
     """
     Implement encoding and decoding for multiple versions of the immutable
     container schema.
@@ -115,7 +115,7 @@ class ILeaseInfo(Interface):

 @implementer(ILeaseInfo)
 @attr.s(frozen=True)
-class LeaseInfo(object):
+class LeaseInfo:
     """
     Represent the details of one lease, a marker which is intended to inform
     the storage server how long to store a particular share.
@@ -369,7 +369,7 @@ class HashedLeaseInfo(proxyForInterface(ILeaseInfo, "_lease_info")): # type: ign


 @attr.s(frozen=True)
-class _HashedCancelSecret(object):
+class _HashedCancelSecret:
     """
     ``_HashedCancelSecret`` is a marker type for an already-hashed lease
     cancel secret that lets internal lease cancellers bypass the hash-based
@@ -15,7 +15,7 @@ from .lease import (
 )

 @attr.s(frozen=True)
-class CleartextLeaseSerializer(object):
+class CleartextLeaseSerializer:
     """
     Serialize and unserialize leases with cleartext secrets.
     """
@@ -46,7 +46,7 @@ class CleartextLeaseSerializer(object):
         return self._from_data(data)

 @attr.s(frozen=True)
-class HashedLeaseSerializer(object):
+class HashedLeaseSerializer:
     _to_data = attr.ib()
     _from_data = attr.ib()

@@ -45,7 +45,7 @@ from .mutable_schema import (
 assert struct.calcsize(">L") == 4, struct.calcsize(">L")
 assert struct.calcsize(">Q") == 8, struct.calcsize(">Q")

-class MutableShareFile(object):
+class MutableShareFile:

     sharetype = "mutable"
     DATA_LENGTH_OFFSET = struct.calcsize(">32s20s32s")
@@ -457,7 +457,7 @@ def testv_compare(a, op, b):
     return a == b


-class EmptyShare(object):
+class EmptyShare:

     def check_testv(self, testv):
         test_good = True
@@ -89,7 +89,7 @@ _EXTRA_LEASE_OFFSET = _HEADER_SIZE + 4 * LeaseInfo().mutable_size()


 @attr.s(frozen=True)
-class _Schema(object):
+class _Schema:
     """
     Implement encoding and decoding for the mutable container.

@@ -121,7 +121,7 @@ ANONYMOUS_STORAGE_NURLS = "anonymous-storage-NURLs"
 # don't pass signatures: only pass validated blessed-objects

 @attr.s
-class StorageClientConfig(object):
+class StorageClientConfig:
     """
     Configuration for a node acting as a storage client.

@@ -578,7 +578,7 @@ class StorageFarmBroker(service.MultiService):
         return StubServer(serverid)

 @implementer(IDisplayableServer)
-class StubServer(object):
+class StubServer:
     def __init__(self, serverid):
         assert isinstance(serverid, bytes)
         self.serverid = serverid # binary tubid
@@ -680,7 +680,7 @@ def _parse_announcement(server_id: bytes, furl: bytes, ann: dict) -> tuple[str,

 @implementer(IFoolscapStorageServer)
 @attr.s(frozen=True)
-class _FoolscapStorage(object):
+class _FoolscapStorage:
     """
     Abstraction for connecting to a storage server exposed via Foolscap.
     """
@@ -739,7 +739,7 @@ class _FoolscapStorage(object):

 @implementer(IFoolscapStorageServer)
 @define
-class _NullStorage(object):
+class _NullStorage:
     """
     Abstraction for *not* communicating with a storage server of a type with
     which we can't communicate.
@@ -758,7 +758,7 @@ class _NullStorage(object):
         return NonReconnector()


-class NonReconnector(object):
+class NonReconnector:
     """
     A ``foolscap.reconnector.Reconnector``-alike that doesn't do anything.
     """
@@ -1379,7 +1379,7 @@ class UnknownServerTypeError(Exception):

 @implementer(IStorageServer)
 @attr.s
-class _StorageServer(object):
+class _StorageServer:
     """
     ``_StorageServer`` is a direct pass-through to an ``RIStorageServer`` via
     a ``RemoteReference``.
@@ -1490,7 +1490,7 @@ class _StorageServer(object):


 @attr.s(hash=True)
-class _FakeRemoteReference(object):
+class _FakeRemoteReference:
     """
     Emulate a Foolscap RemoteReference, calling a local object instead.
     """
@@ -1506,7 +1506,7 @@ class _FakeRemoteReference(object):


 @attr.s
-class _HTTPBucketWriter(object):
+class _HTTPBucketWriter:
     """
     Emulate a ``RIBucketWriter``, but use HTTP protocol underneath.
     """
@@ -1547,7 +1547,7 @@ def _ignore_404(failure: Failure) -> Optional[Failure]:


 @attr.s(hash=True)
-class _HTTPBucketReader(object):
+class _HTTPBucketReader:
     """
     Emulate a ``RIBucketReader``, but use HTTP protocol underneath.
     """
@@ -1570,7 +1570,7 @@ class _HTTPBucketReader(object):
 # WORK IN PROGRESS, for now it doesn't actually implement whole thing.
 @implementer(IStorageServer)  # type: ignore
 @attr.s
-class _HTTPStorageServer(object):
+class _HTTPStorageServer:
     """
     Talk to remote storage server over HTTP.
     """
@@ -522,7 +522,7 @@ class CLI(CLITestMixin, unittest.TestCase):
         self.basedir = "cli/exception_catcher"

         exc = Exception("canary")
-        class BrokenOptions(object):
+        class BrokenOptions:
             def parseOptions(self, argv):
                 raise exc

@@ -115,7 +115,7 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         original_do_http = tahoe_mv.do_http
         def mock_do_http(method, url, body=b""):
             if method == "DELETE":
-                class FakeResponse(object):
+                class FakeResponse:
                     def read(self):
                         return "response"
                 resp = FakeResponse()
@@ -30,7 +30,7 @@ from ..common_web import do_http
 from .common import CLITestMixin


-class FakeStatus(object):
+class FakeStatus:
     def __init__(self):
         self.status = []

@@ -52,7 +52,7 @@ RelayURL = str
 ApplicationKey = Tuple[RelayURL, AppId]

 @define
-class MemoryWormholeServer(object):
+class MemoryWormholeServer:
     """
     A factory for in-memory wormholes.

@@ -105,7 +105,7 @@ class MemoryWormholeServer(object):


 @frozen
-class TestingHelper(object):
+class TestingHelper:
     """
     Provide extra functionality for interacting with an in-memory wormhole
     implementation.
@@ -160,7 +160,7 @@ _verify()


 @define
-class _WormholeApp(object):
+class _WormholeApp:
     """
     Represent a collection of wormholes that belong to the same
     appid/relay_url scope.
@@ -204,7 +204,7 @@ class _WormholeApp(object):


 @frozen
-class _WormholeServerView(object):
+class _WormholeServerView:
     """
     Present an interface onto the server to be consumed by individual
     wormholes.
@@ -235,7 +235,7 @@ class _WormholeServerView(object):

 @implementer(IWormhole)
 @define
-class _MemoryWormhole(object):
+class _MemoryWormhole:
     """
     Represent one side of a wormhole as conceived by ``MemoryWormholeServer``.
     """
@@ -119,7 +119,7 @@ def on_different(fd_mapping):
     return _ProcessProtocolAdapter(fd_mapping)

 @attr.s
-class CLINodeAPI(object):
+class CLINodeAPI:
     reactor = attr.ib()
     basedir = attr.ib(type=FilePath)
     process = attr.ib(default=None)
@@ -129,7 +129,7 @@ def byteschr(x):
     return bytes([x])

 @attr.s
-class FakeDisk(object):
+class FakeDisk:
     """
     Just enough of a disk to be able to report free / used information.
     """
@@ -166,7 +166,7 @@ class FakeDisk(object):


 @attr.s
-class MemoryIntroducerClient(object):
+class MemoryIntroducerClient:
     """
     A model-only (no behavior) stand-in for ``IntroducerClient``.
     """
@@ -199,7 +199,7 @@ class MemoryIntroducerClient(object):


 @attr.s
-class Subscription(object):
+class Subscription:
     """
     A model of an introducer subscription.
     """
@@ -210,7 +210,7 @@ class Subscription(object):


 @attr.s
-class Announcement(object):
+class Announcement:
     """
     A model of an introducer announcement.
     """
@ -237,7 +237,7 @@ def get_published_announcements(client):
|
||||
)
|
||||
|
||||
|
||||
class UseTestPlugins(object):
|
||||
class UseTestPlugins:
|
||||
"""
|
||||
A fixture which enables loading Twisted plugins from the Tahoe-LAFS test
|
||||
suite.
|
||||
@ -265,7 +265,7 @@ class UseTestPlugins(object):
|
||||
|
||||
|
||||
@attr.s
|
||||
class UseNode(object):
|
||||
class UseNode:
|
||||
"""
|
||||
A fixture which creates a client node.
|
||||
|
||||
@ -374,7 +374,7 @@ class UseNode(object):
|
||||
|
||||
|
||||
@implementer(IPlugin, IStreamServerEndpointStringParser)
|
||||
class AdoptedServerPort(object):
|
||||
class AdoptedServerPort:
|
||||
"""
|
||||
Parse an ``adopt-socket:<fd>`` endpoint description by adopting ``fd`` as
|
||||
a listening TCP port.
|
||||
@ -414,7 +414,7 @@ def really_bind(s, addr):
|
||||
raise Exception("Many bind attempts failed with EADDRINUSE")
|
||||
|
||||
|
||||
class SameProcessStreamEndpointAssigner(object):
|
||||
class SameProcessStreamEndpointAssigner:
|
||||
"""
|
||||
A fixture which can assign streaming server endpoints for use *in this
|
||||
process only*.
|
||||
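Only the failure path of ``really_bind`` is visible in the hunk above; its retry-until-bound shape can be sketched as follows. This is an assumption about the helper's behavior inferred from its error message, not the actual implementation:

import errno
import time

def really_bind(s, addr, attempts=100):
    # Retry transient EADDRINUSE failures, which can occur while a
    # recently closed socket on the same address is still in TIME_WAIT.
    for _ in range(attempts):
        try:
            s.bind(addr)
            return
        except OSError as e:
            if e.errno != errno.EADDRINUSE:
                raise
            time.sleep(0.1)
    raise Exception("Many bind attempts failed with EADDRINUSE")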
@ -479,7 +479,7 @@ class SameProcessStreamEndpointAssigner(object):
return location_hint, port_endpoint

@implementer(IPullProducer)
class DummyProducer(object):
class DummyProducer:
def resumeProducing(self):
pass

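``DummyProducer`` relies on ``zope.interface``'s ``@implementer`` to declare that its instances provide Twisted's ``IPullProducer``; the declaration is unaffected by dropping the ``object`` base. A self-contained sketch of the pattern (names are illustrative):

from twisted.internet.interfaces import IPullProducer
from zope.interface import implementer

@implementer(IPullProducer)
class NullProducer:
    def resumeProducing(self):
        pass  # produce nothing; a placeholder for tests

    def stopProducing(self):
        pass

# The interface machinery can verify the declaration at runtime.
assert IPullProducer.providedBy(NullProducer())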
@ -916,7 +916,7 @@ class LoggingServiceParent(service.MultiService):
TEST_DATA=b"\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)


class WebErrorMixin(object):
class WebErrorMixin:
def explain_web_error(self, f):
# an error on the server side causes the client-side getPage() to
# return a failure(t.web.error.Error), and its str() doesn't show the
@ -1276,7 +1276,7 @@ def _corrupt_uri_extension(data, debug=False):

@attr.s
@implementer(IAddressFamily)
class ConstantAddresses(object):
class ConstantAddresses:
"""
Pretend to provide support for some address family but just hand out
canned responses.
@ -1318,7 +1318,7 @@ def disable_modules(*names):
else:
sys.modules[n] = original

class _TestCaseMixin(object):
class _TestCaseMixin:
"""
A mixin for ``TestCase`` which collects helpful behaviors for subclasses.

@ -226,7 +226,7 @@ def flip_one_bit(s, offset=0, size=None):
return result


class ReallyEqualMixin(object):
class ReallyEqualMixin:
def failUnlessReallyEqual(self, a, b, msg=None):
self.assertEqual(a, b, msg)
if a.__class__ == str:
@ -236,7 +236,7 @@ class ReallyEqualMixin(object):
self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))


class SignalMixin(object):
class SignalMixin:
# This class is necessary for any code which wants to use Processes
# outside the usual reactor.run() environment. It is copied from
# Twisted's twisted.test.test_process . Note that Twisted-8.2.0 uses
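The extra ``type`` check in ``failUnlessReallyEqual`` above matters because ``assertEqual`` alone passes for values that compare equal across types, which can mask bytes-versus-text regressions in a Python 3 port. A few illustrative cases:

assert 1 == 1.0                  # assertEqual would pass here
assert type(1) is not type(1.0)  # the stricter check catches the difference
assert "a" != b"a"               # on Python 3, text and bytes never compare equal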
@ -258,17 +258,17 @@ class SignalMixin(object):
return super(SignalMixin, self).tearDown()


class StallMixin(object):
class StallMixin:
def stall(self, res=None, delay=1):
d = defer.Deferred()
reactor.callLater(delay, d.callback, res)
return d


class Marker(object):
class Marker:
pass

class FakeCanary(object):
class FakeCanary:
"""For use in storage tests.
"""
def __init__(self, ignore_disconnectors=False):
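The ``super(SignalMixin, self).tearDown()`` call left intact above is the explicit Python 2 spelling; on Python 3 it is interchangeable with the zero-argument form, which this commit does not touch. A brief illustrative equivalence:

class Base:
    def tearDown(self):
        return "torn down"

class Child(Base):
    def tearDown(self):
        # Python 3: identical to super(Child, self).tearDown()
        return super().tearDown()

assert Child().tearDown() == "torn down"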
@ -300,7 +300,7 @@ class FakeCanary(object):
self.disconnectors = None


class ShouldFailMixin(object):
class ShouldFailMixin:

def shouldFail(self, expected_failure, which, substring,
callable, *args, **kwargs):
@ -392,7 +392,7 @@ class TestMixin(SignalMixin):
self.fail("Reactor was still active when it was required to be quiescent.")


class TimezoneMixin(object):
class TimezoneMixin:

def setTimezone(self, timezone):
def tzset_if_possible():

@ -57,7 +57,7 @@ RUN_TEST = ActionType(


@attr.s
class EliotLoggedRunTest(object):
class EliotLoggedRunTest:
"""
A *RunTest* implementation which surrounds test invocation with an
Eliot-based action.
@ -162,7 +162,7 @@ def with_logging(


@implementer(ILogger)
class _TwoLoggers(object):
class _TwoLoggers:
"""
Log to two loggers.

@ -33,7 +33,7 @@ from allmydata.crypto import (
)

@attr.s
class MatchesNodePublicKey(object):
class MatchesNodePublicKey:
"""
Match an object representing the node's private key.

@ -124,7 +124,7 @@ def matches_base32():


class MatchesSameElements(object):
class MatchesSameElements:
"""
Match if the two-tuple value given contains two elements that are equal to
each other.

@ -22,14 +22,14 @@ from ..no_network import GridTestMixin
from .. import common_util as testutil
from ..common_util import DevNullDictionary

class SameKeyGenerator(object):
class SameKeyGenerator:
def __init__(self, pubkey, privkey):
self.pubkey = pubkey
self.privkey = privkey
def generate(self, keysize=None):
return defer.succeed( (self.pubkey, self.privkey) )

class FirstServerGetsKilled(object):
class FirstServerGetsKilled:
done = False
def notify(self, retval, wrapper, methname):
if not self.done:
@ -37,7 +37,7 @@ class FirstServerGetsKilled(object):
self.done = True
return retval

class FirstServerGetsDeleted(object):
class FirstServerGetsDeleted:
def __init__(self):
self.done = False
self.silenced = None

@ -32,7 +32,7 @@ def eventuaaaaaly(res=None):
# network connections, both to speed up the tests and to reduce the amount of
# non-mutable.py code being exercised.

class FakeStorage(object):
class FakeStorage:
# this class replaces the collection of storage servers, allowing the
# tests to examine and manipulate the published shares. It also lets us
# control the order in which read queries are answered, to exercise more
@ -92,7 +92,7 @@ class FakeStorage(object):
# This doesn't actually implement the whole interface, but adding a commented
# interface implementation annotation for grepping purposes.
#@implementer(RIStorageServer)
class FakeStorageServer(object):
class FakeStorageServer:
"""
A fake Foolscap remote object, implemented by overriding callRemote() to
call local methods.
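The ``FakeStorageServer`` docstring above describes the core trick: intercept ``callRemote`` and dispatch to local methods, returning Deferreds the way Foolscap would. A stripped-down sketch of that idea (not Tahoe's implementation; names are illustrative):

from twisted.internet import defer

class FakeRemote:
    # Route "remote" calls to a local object, wrapping results in Deferreds.
    def __init__(self, target):
        self._target = target

    def callRemote(self, methname, *args, **kwargs):
        method = getattr(self._target, methname)
        return defer.maybeDeferred(method, *args, **kwargs)

class Target:
    def ping(self):
        return "pong"

d = FakeRemote(Target()).callRemote("ping")
d.addCallback(print)  # prints: pong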
@ -217,7 +217,7 @@ def corrupt(res, s, offset, shnums_to_corrupt=None, offset_offset=0):
return dl

@attr.s
class Peer(object):
class Peer:
peerid = attr.ib()
storage_server = attr.ib()
announcement = attr.ib()
@ -318,7 +318,7 @@ def make_nodemaker_with_storage_broker(storage_broker):
return nodemaker


class PublishMixin(object):
class PublishMixin:
def publish_one(self):
# publish a file and create shares, which can then be manipulated
# later.
@ -428,7 +428,7 @@ class PublishMixin(object):
index = versionmap[shnum]
shares[peerid][shnum] = oldshares[index][peerid][shnum]

class CheckerMixin(object):
class CheckerMixin:
def check_good(self, r, where):
self.failUnless(r.is_healthy(), where)
return r

@ -57,13 +57,13 @@ from .common import (
class IntentionalError(Exception):
pass

class Marker(object):
class Marker:
pass

fireNow = partial(defer.succeed, None)

@implementer(IRemoteReference) # type: ignore # warner/foolscap#79
class LocalWrapper(object):
class LocalWrapper:
"""
A ``LocalWrapper`` presents the remote reference interface to a local
object which implements a ``RemoteInterface``.
@ -171,7 +171,7 @@ def wrap_storage_server(original):
return wrapper

@implementer(IServer)
class NoNetworkServer(object):
class NoNetworkServer:
def __init__(self, serverid, rref):
self.serverid = serverid
self.rref = rref
@ -293,7 +293,7 @@ class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573
#._servers will be set by the NoNetworkGrid which creates us


class SimpleStats(object):
class SimpleStats:
def __init__(self):
self.counters = {}
self.stats_producers = []
@ -463,7 +463,7 @@ class NoNetworkGrid(service.MultiService):
fileutil.rm_dir(os.path.join(server.sharedir, prefixdir))


class GridTestMixin(object):
class GridTestMixin:
def setUp(self):
self.s = service.MultiService()
self.s.startService()

@ -52,7 +52,7 @@ class RIDummy(RemoteInterface):
# https://twistedmatrix.com/trac/ticket/9717
@implementer(IFoolscapStoragePlugin) # type: ignore
@attr.s
class DummyStorage(object):
class DummyStorage:
name = attr.ib()

@property

@ -46,7 +46,7 @@ from .web.common import (
assert_soup_has_tag_with_content,
)

class FakeClient(object):
class FakeClient:
def get_storage_broker(self):
return self.storage_broker

@ -781,7 +781,7 @@ class AddLease(GridTestMixin, unittest.TestCase):
d.addCallback(lambda ign: self.failUnless(really_did_break))
return d

class CounterHolder(object):
class CounterHolder:
def __init__(self):
self._num_active_block_fetches = 0
self._max_active_block_fetches = 0

@ -19,7 +19,7 @@ from testtools.matchers import (

@implementer(IPushProducer)
@implementer(IPullProducer)
class Producer(object):
class Producer:
"""Can be used as either streaming or non-streaming producer.

If used as streaming, the test should call iterate() manually.

@ -47,7 +47,7 @@ from hypothesis.strategies import text


@implementer(IConsumer)
class MemAccum(object):
class MemAccum:
def registerProducer(self, producer, streaming):
self.producer = producer
self.producer.resumeProducing()
@ -1395,7 +1395,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
self.set_up_grid(oneshare=True)
return self._do_initial_children_test(mdmf=True)

class MinimalFakeMutableFile(object):
class MinimalFakeMutableFile:
def get_writekey(self):
return b"writekey"

@ -1347,7 +1347,7 @@ def make_servers(clientids):
servers[clientid] = make_server(clientid)
return servers

class MyShare(object):
class MyShare:
def __init__(self, shnum, server, rtt):
self._shnum = shnum
self._server = server
@ -1363,7 +1363,7 @@ class MySegmentFetcher(SegmentFetcher):
def _start_share(self, share, shnum):
self._test_start_shares.append(share)

class FakeNode(object):
class FakeNode:
def __init__(self):
self.want_more = 0
self.failed = None

@ -99,7 +99,7 @@ class EliotLoggedTestTests(TestCase):
A test method of an ``AsyncTestCase`` subclass that logs an unserializable
value with Eliot fails.
"""
class world(object):
class world:
"""
an unserializable object
"""

@ -25,7 +25,7 @@ def flip_bit(good): # flips the last bit
return good[:-1] + byteschr(ord(good[-1]) ^ 0x01)

@implementer(IStorageBucketWriter, IStorageBucketReader)
class FakeBucketReaderWriterProxy(object):
class FakeBucketReaderWriterProxy:
# these are used for both reading and writing
def __init__(self, mode="good", peerid="peer"):
self.mode = mode

@ -70,7 +70,7 @@ from allmydata.util.encodingutil import unicode_to_url, \
to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
unicode_to_argv

class MockStdout(object):
class MockStdout:
pass

@ -11,10 +11,10 @@ from allmydata.mutable.filenode import MutableFileNode
from allmydata.util import hashutil
from allmydata.util.consumer import download_to_data

class NotANode(object):
class NotANode:
pass

class FakeClient(object):
class FakeClient:
# just enough to let the node acquire a downloader (which it won't use),
# and to get default encoding parameters
def getServiceNamed(self, name):

@ -282,7 +282,7 @@ class PlacementTests(unittest.TestCase):
assert happiness == min(len(peers), len(shares))


class FakeServerTracker(object):
class FakeServerTracker:
def __init__(self, serverid, buckets):
self._serverid = serverid
self.buckets = buckets

@ -96,7 +96,7 @@ class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
return d

@attr.s
class FakeCHKCheckerAndUEBFetcher(object):
class FakeCHKCheckerAndUEBFetcher:
"""
A fake of ``CHKCheckerAndUEBFetcher`` which hard-codes some check result.
"""

@ -24,11 +24,11 @@ from .no_network import (
NoNetworkServer,
)

class MockShareHashTree(object):
class MockShareHashTree:
def needed_hashes(self):
return False

class MockNode(object):
class MockNode:
def __init__(self, check_reneging, check_fetch_failed):
self.got = 0
self.finished_d = defer.Deferred()
@ -86,10 +86,10 @@ class TestShareFinder(unittest.TestCase):
rcap = uri.CHKFileURI(b'a'*32, b'a'*32, 3, 99, 100)
vcap = rcap.get_verify_cap()

class MockBuckets(object):
class MockBuckets:
pass

class MockServer(object):
class MockServer:
def __init__(self, buckets):
self.version = {
b'http://allmydata.org/tahoe/protocols/storage/v1': {
@ -113,17 +113,17 @@ class TestShareFinder(unittest.TestCase):
eventually(_give_buckets_and_hunger_again)
return d

class MockStorageBroker(object):
class MockStorageBroker:
def __init__(self, servers):
self.servers = servers
def get_servers_for_psi(self, si):
return self.servers

class MockDownloadStatus(object):
class MockDownloadStatus:
def add_dyhb_request(self, server, when):
return MockDYHBEvent()

class MockDYHBEvent(object):
class MockDYHBEvent:
def finished(self, shnums, when):
pass

@ -146,7 +146,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
self.failUnlessReallyEqual(w.staticdir, expected)


class ServiceMixin(object):
class ServiceMixin:
def setUp(self):
self.parent = LoggingMultiService()
self.parent.startService()
@ -732,7 +732,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
os.makedirs(self.basedir)
return self.do_system_test()

class FakeRemoteReference(object):
class FakeRemoteReference:
def notifyOnDisconnect(self, *args, **kwargs): pass
def getRemoteTubID(self): return "62ubehyunnyhzs7r6vdonnm2hpi52w6y"
def getPeer(self): return address.IPv4Address("TCP", "remote.example.com",

@ -50,7 +50,7 @@ def new_secret():
return _randbytes(32)


class IStorageServerSharedAPIsTestsMixin(object):
class IStorageServerSharedAPIsTestsMixin:
"""
Tests for ``IStorageServer``'s shared APIs.

@ -68,7 +68,7 @@ class IStorageServerSharedAPIsTestsMixin(object):
self.assertIn(b"http://allmydata.org/tahoe/protocols/storage/v1", result)


class IStorageServerImmutableAPIsTestsMixin(object):
class IStorageServerImmutableAPIsTestsMixin:
"""
Tests for ``IStorageServer``'s immutable APIs.

@ -515,7 +515,7 @@ class IStorageServerImmutableAPIsTestsMixin(object):
self.assertEqual(lease2.get_expiration_time() - initial_expiration_time, 167)


class IStorageServerMutableAPIsTestsMixin(object):
class IStorageServerMutableAPIsTestsMixin:
"""
Tests for ``IStorageServer``'s mutable APIs.

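These ``IStorageServer`` mixins follow a standard pattern: the shared test methods live in a plain class that is not itself a ``TestCase``, so the runner only executes them through concrete subclasses that combine the mixin with a real backend. A generic sketch of the pattern (illustrative names, not Tahoe's code):

import unittest

class ArithmeticTestsMixin:
    # Shared tests; not a TestCase, so the runner does not collect it alone.
    def test_identity(self):
        self.assertEqual(self.add(0, 7), 7)

class IntBackendTests(ArithmeticTestsMixin, unittest.TestCase):
    def add(self, a, b):
        return a + b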
@ -7,7 +7,7 @@ from twisted.trial.unittest import TestCase
from allmydata.web.common import get_filenode_metadata, SDMF_VERSION, MDMF_VERSION


class MockFileNode(object):
class MockFileNode:
def __init__(self, size, mutable_version=None):
self.size = size
self.mutable_version = mutable_version
@ -24,7 +24,7 @@ class MockFileNode(object):
return self.mutable_version


class CommonFixture(object):
class CommonFixture:
def test_size_is_0(self):
"""If get_size doesn't return None the returned metadata must contain "size"."""
mockfilenode = MockFileNode(0, mutable_version=self.mutable_version)

@ -159,7 +159,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):

tub = testing_tub(reactor, config_data)

class Foo(object):
class Foo:
pass

furl = tub.registerReference(Foo())
@ -831,7 +831,7 @@ ENABLE_HELPER = """
enabled = true
"""

class FakeTub(object):
class FakeTub:
def __init__(self):
self.tubID = base64.b32encode(b"foo")
self.listening_ports = []

@ -26,7 +26,7 @@ from allmydata.web.status import Statistics
from allmydata.test.common import SyncTestCase


class FakeStatsProvider(object):
class FakeStatsProvider:
"""
A stats provider that hands back a canned collection of performance
statistics.

@ -21,7 +21,7 @@ MAX_DELTA_READS = 10 * READ_LEEWAY # N = 10

timeout=240 # François's ARM box timed out after 120 seconds of Verifier.test_corrupt_crypttext_hashtree

class RepairTestMixin(object):
class RepairTestMixin:
def _count_reads(self):
sum_of_read_counts = 0
for (i, ss, storedir) in self.iterate_servers():

@ -1391,7 +1391,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
return d

def test_execCommand_and_openShell(self):
class MockProtocol(object):
class MockProtocol:
def __init__(self):
self.output = ""
self.error = ""
Some files were not shown because too many files have changed in this diff.