Merge pull request #1423 from a-detiste/master

clean Python2 syntax
commit f45175569e
meejah, 2025-01-08 22:04:48 -07:00 (committed by GitHub)
133 changed files with 307 additions and 310 deletions
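For context, the change is purely mechanical: in Python 3 every class is new-style, so spelling out inheritance from object is redundant, and dropping it changes nothing at runtime. A minimal sketch of the equivalence (hypothetical names, not taken from this diff):

# Python 2 needed "(object)" to opt in to new-style classes; in Python 3
# the implicit base of every class is already object.
class Legacy(object):
    pass

class Modern:
    pass

# Both spellings produce the same kind of class with the same bases.
assert Legacy.__bases__ == Modern.__bases__ == (object,)
assert isinstance(Modern(), object)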

View File

@@ -69,7 +69,7 @@ import pytest_twisted
 @attr.s
-class FlogGatherer(object):
+class FlogGatherer:
     """
     Flog Gatherer process.
     """
@@ -148,7 +148,7 @@ def create_flog_gatherer(reactor, request, temp_dir, flog_binary):
 @attr.s
-class StorageServer(object):
+class StorageServer:
     """
     Represents a Tahoe Storage Server
     """
@@ -200,7 +200,7 @@ def create_storage_server(reactor, request, temp_dir, introducer, flog_gatherer,
 @attr.s
-class Client(object):
+class Client:
     """
     Represents a Tahoe client
     """
@@ -328,7 +328,7 @@ def create_client(reactor, request, temp_dir, introducer, flog_gatherer, name, w
 @attr.s
-class Introducer(object):
+class Introducer:
     """
     Reprsents a running introducer
     """
@@ -434,7 +434,7 @@ def create_introducer(reactor, request, temp_dir, flog_gatherer, port):
 @attr.s
-class Grid(object):
+class Grid:
     """
     Represents an entire Tahoe Grid setup
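The same equivalence holds for decorated classes such as the @attr.s ones in this file: the decorator receives an ordinary new-style class under either spelling. A small sketch, assuming the attrs package (hypothetical Point class, not part of this change):

import attr

@attr.s
class Point:            # previously written as: class Point(object):
    x = attr.ib()
    y = attr.ib()

# attrs generates __init__, __repr__ and __eq__ identically either way.
assert Point(1, 2) == Point(1, 2)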

View File

@@ -103,7 +103,7 @@ def _race(left, right):
 @attr.s
-class Left(object):
+class Left:
     value = attr.ib()
     @classmethod
@@ -112,7 +112,7 @@ class Left(object):
 @attr.s
-class Right(object):
+class Right:
     value = attr.ib()
     @classmethod

View File

@@ -253,7 +253,7 @@ def _tahoe_runner_optional_coverage(proto, reactor, request, other_args):
 )
-class TahoeProcess(object):
+class TahoeProcess:
     """
     A running Tahoe process, with associated information.
     """

View File

@@ -67,7 +67,7 @@ class GridTesterOptions(usage.Options):
 class CommandFailed(Exception):
     pass
-class GridTester(object):
+class GridTester:
     def __init__(self, config):
         self.config = config
         self.tahoe = config.tahoe

View File

@@ -9,7 +9,7 @@ DAY=24*60*60
 MONTH=31*DAY
 YEAR=365*DAY
-class ReliabilityModel(object):
+class ReliabilityModel:
     """Generate a model of system-wide reliability, given several input
     parameters.
@@ -208,7 +208,7 @@ class ReliabilityModel(object):
         repair = matrix(new_repair_rows)
         return repair
-class ReliabilityReport(object):
+class ReliabilityReport:
     def __init__(self):
         self.samples = []

View File

@@ -10,7 +10,7 @@ except ImportError:
 from nevow import inevow
 from zope.interface import implements
-class MyRequest(object):
+class MyRequest:
     implements(inevow.IRequest)
     pass

View File

@@ -23,7 +23,7 @@ GET_SPANS_S='_received spans trace .get_spans()'
 ADD_R=re.compile('_received spans trace .add\(([0-9]*), len=([0-9]*)\)')
 INIT_S='_received spans trace = DataSpans'
-class B(object):
+class B:
     def __init__(self, inf):
         self.inf = inf

View File

@@ -53,7 +53,7 @@ print("average file size:", abbreviate_space(avg_filesize))
 SERVER_CAPACITY = 10**12
-class Server(object):
+class Server:
     def __init__(self, nodeid, capacity):
         self.nodeid = nodeid
         self.used = 0
@@ -74,7 +74,7 @@ class Server(object):
         else:
             return "<%s %s>" % (self.__class__.__name__, self.nodeid)
-class Ring(object):
+class Ring:
     SHOW_MINMAX = False
     def __init__(self, numservers, seed, permute):
         self.servers = []

View File

@@ -9,7 +9,7 @@ SERVER_CAPACITY = 10**12
 def cmp(a, b):
     return (a > b) - (a < b)
-class Server(object):
+class Server:
     def __init__(self):
         self.si = random.randrange(0, 2**31)
         self.used = 0

View File

@@ -16,7 +16,7 @@ def sha(s):
 def randomid():
     return os.urandom(20)
-class Node(object):
+class Node:
     def __init__(self, nid, introducer, simulator):
         self.nid = nid
         self.introducer = introducer
@@ -111,7 +111,7 @@ class Node(object):
         self.introducer.delete(fileid)
         return True
-class Introducer(object):
+class Introducer:
     def __init__(self, simulator):
         self.living_files = {}
         self.utilization = 0 # total size of all active files
@@ -148,7 +148,7 @@ class Introducer(object):
         self.simulator.stamp_utilization(self.utilization)
         del self.living_files[fileid]
-class Simulator(object):
+class Simulator:
     NUM_NODES = 1000
     EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
     RATE_ADDFILE = 1.0 / 10

View File

@@ -35,7 +35,7 @@ GiB=1024*MiB
 TiB=1024*GiB
 PiB=1024*TiB
-class Sizes(object):
+class Sizes:
     def __init__(self, mode, file_size, arity=2):
         MAX_SEGSIZE = 128*KiB
         self.mode = mode

View File

@@ -11,7 +11,7 @@ def roundup(size, blocksize=4096):
     return blocksize * mathutil.div_ceil(size, blocksize)
-class BigFakeString(object):
+class BigFakeString:
     def __init__(self, length):
         self.length = length
         self.fp = 0

newsfragments/4149.minor (new empty file)
View File

View File

@@ -20,7 +20,7 @@ class FileProhibited(Exception):
         self.reason = reason
-class Blacklist(object):
+class Blacklist:
     def __init__(self, blacklist_fn):
         self.blacklist_fn = blacklist_fn
         self.last_mtime = None
@@ -61,7 +61,7 @@ class Blacklist(object):
 @implementer(IFileNode)
-class ProhibitedNode(object):
+class ProhibitedNode:
     def __init__(self, wrapped_node, reason):
         assert IFilesystemNode.providedBy(wrapped_node), wrapped_node

View File

@@ -7,7 +7,7 @@ from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
 from allmydata.util import base32
 @implementer(ICheckResults)
-class CheckResults(object):
+class CheckResults:
     def __init__(self, uri, storage_index,
                  healthy, recoverable, count_happiness,
@@ -153,7 +153,7 @@ class CheckResults(object):
         return self._servermap
 @implementer(ICheckAndRepairResults)
-class CheckAndRepairResults(object):
+class CheckAndRepairResults:
     def __init__(self, storage_index):
         self.storage_index = storage_index
@@ -175,7 +175,7 @@ class CheckAndRepairResults(object):
         return self.post_repair_results
-class DeepResultsBase(object):
+class DeepResultsBase:
     def __init__(self, root_storage_index):
         self.root_storage_index = root_storage_index

View File

@@ -50,7 +50,7 @@ def grid_manager(ctx, config):
     signing key) and should be kept safe.
     """
-class Config(object):
+class Config:
     """
     Available to all sub-commands as Click's context.obj
     """

View File

@@ -156,7 +156,7 @@ def _make_secret():
     return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"
-class SecretHolder(object):
+class SecretHolder:
     def __init__(self, lease_secret, convergence_secret):
         self._lease_secret = lease_secret
         self._convergence_secret = convergence_secret
@@ -170,7 +170,7 @@ class SecretHolder(object):
     def get_convergence_secret(self):
         return self._convergence_secret
-class KeyGenerator(object):
+class KeyGenerator:
     """I create RSA keys for mutable files. Each call to generate() returns a
     single keypair."""
@@ -314,7 +314,7 @@ def create_client_from_config(config, _client_factory=None, _introducer_factory=
 @attr.s
-class _StoragePlugins(object):
+class _StoragePlugins:
     """
     Functionality related to getting storage plugins set up and ready for use.
@@ -559,7 +559,7 @@ def _register_reference(key, config, tub, referenceable):
 @implementer(IAnnounceableStorageServer)
 @attr.s
-class AnnounceableStorageServer(object):
+class AnnounceableStorageServer:
     announcement = attr.ib()
     storage_server = attr.ib()

View File

@@ -13,7 +13,7 @@ from allmydata.interfaces import ICodecEncoder, ICodecDecoder
 import zfec
 @implementer(ICodecEncoder)
-class CRSEncoder(object):
+class CRSEncoder:
     ENCODER_TYPE = b"crs"
     def set_params(self, data_size, required_shares, max_shares):
@@ -55,7 +55,7 @@ class CRSEncoder(object):
 @implementer(ICodecDecoder)
-class CRSDecoder(object):
+class CRSDecoder:
     def set_params(self, data_size, required_shares, max_shares):
         self.data_size = data_size

View File

@@ -13,7 +13,7 @@ from allmydata.uri import LiteralFileURI
 from allmydata.uri import from_string
 from allmydata.util import mathutil
-class DeepStats(object):
+class DeepStats:
     """Deep stats object.
     Holds results of the deep-stats operation.

View File

@@ -63,7 +63,7 @@ ADD_FILE = ActionType(
 )
-class _OnlyFiles(object):
+class _OnlyFiles:
     """Marker for replacement option of only replacing files."""
 ONLY_FILES = _OnlyFiles()
@@ -115,7 +115,7 @@ def update_metadata(metadata, new_metadata, now):
 # contents and end by repacking them. It might be better to apply them to
 # the unpacked contents.
-class Deleter(object):
+class Deleter:
     def __init__(self, node, namex, must_exist=True, must_be_directory=False, must_be_file=False):
         self.node = node
         self.name = normalize(namex)
@@ -143,7 +143,7 @@ class Deleter(object):
         return new_contents
-class MetadataSetter(object):
+class MetadataSetter:
     def __init__(self, node, namex, metadata, create_readonly_node=None):
         self.node = node
         self.name = normalize(namex)
@@ -168,7 +168,7 @@ class MetadataSetter(object):
         return new_contents
-class Adder(object):
+class Adder:
     def __init__(self, node, entries=None, overwrite=True, create_readonly_node=None):
         """
        :param overwrite: Either True (allow overwriting anything existing),
@@ -300,7 +300,7 @@ def _pack_normalized_children(children, writekey, deep_immutable=False):
     return b"".join(entries)
 @implementer(IDirectoryNode, ICheckable, IDeepCheckable)
-class DirectoryNode(object):
+class DirectoryNode:
     filenode_class = MutableFileNode
     def __init__(self, filenode, nodemaker, uploader):
@@ -873,7 +873,7 @@ class ManifestWalker(DeepStats):
         }
-class DeepChecker(object):
+class DeepChecker:
     def __init__(self, root, verify, repair, add_lease):
         root_si = root.get_storage_index()
         if root_si:

View File

@@ -17,13 +17,13 @@ class NeedRootcapLookupScheme(Exception):
     mechanism to translate name+passwd pairs into a rootcap, either a file of
     name/passwd/rootcap tuples, or a server to do the translation."""
-class FTPAvatarID(object):
+class FTPAvatarID:
     def __init__(self, username, rootcap):
         self.username = username
         self.rootcap = rootcap
 @implementer(checkers.ICredentialsChecker)
-class AccountFileChecker(object):
+class AccountFileChecker:
     credentialInterfaces = (credentials.ISSHPrivateKey,)
     def __init__(self, client, accountfile):

View File

@@ -963,7 +963,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         return d
-class StoppableList(object):
+class StoppableList:
     def __init__(self, items):
         self.items = items
     def __iter__(self):
@@ -973,7 +973,7 @@ class StoppableList(object):
         pass
-class Reason(object):
+class Reason:
     def __init__(self, value):
         self.value = value
@@ -1904,7 +1904,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
 @implementer(ITransport)
-class FakeTransport(object):
+class FakeTransport:
     def write(self, data):
         logmsg("FakeTransport.write(<data of length %r>)" % (len(data),), level=NOISY)
@@ -1979,7 +1979,7 @@ components.registerAdapter(ShellSession, SFTPUserHandler, ISession)
 from allmydata.frontends.auth import AccountFileChecker, NeedRootcapLookupScheme
 @implementer(portal.IRealm)
-class Dispatcher(object):
+class Dispatcher:
     def __init__(self, client):
         self._client = client

View File

@@ -32,7 +32,7 @@ from attrs import (
 @frozen
-class SignedCertificate(object):
+class SignedCertificate:
     """
     A signed certificate.
     """
@@ -62,7 +62,7 @@ class SignedCertificate(object):
 @frozen
-class _GridManagerStorageServer(object):
+class _GridManagerStorageServer:
     """
     A Grid Manager's notion of a storage server
     """
@@ -93,7 +93,7 @@ class _GridManagerStorageServer(object):
 @frozen
-class _GridManagerCertificate(object):
+class _GridManagerCertificate:
     """
     Represents a single certificate for a single storage-server
     """
@@ -224,7 +224,7 @@ def load_grid_manager(config_path: Optional[FilePath]):
     return _GridManager(private_key_bytes, storage_servers)
-class _GridManager(object):
+class _GridManager:
     """
     A Grid Manager's configuration.
     """

View File

@@ -69,7 +69,7 @@ def roundup_pow2(x):
     return ans
-class CompleteBinaryTreeMixin(object):
+class CompleteBinaryTreeMixin:
     """
     Adds convenience methods to a complete binary tree.

View File

@@ -3,7 +3,7 @@
 import weakref
-class History(object):
+class History:
     """Keep track of recent operations, for a status display."""
     name = "history"

View File

@@ -32,7 +32,7 @@ class UnsupportedErasureCodec(BadURIExtension):
     pass
 @implementer(IValidatedThingProxy)
-class ValidatedExtendedURIProxy(object):
+class ValidatedExtendedURIProxy:
     """ I am a front-end for a remote UEB (using a local ReadBucketProxy),
     responsible for retrieving and validating the elements from the UEB."""

View File

@@ -10,7 +10,7 @@ from allmydata.util.dictutil import DictOfSets
 from .common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
      BadSegmentNumberError
-class SegmentFetcher(object):
+class SegmentFetcher:
     """I am responsible for acquiring blocks for a single segment. I will use
     the Share instances passed to my add_shares() method to locate, retrieve,
     and validate those blocks. I expect my parent node to call my

View File

@@ -25,11 +25,11 @@ def incidentally(res, f, *args, **kwargs):
     f(*args, **kwargs)
     return res
-class RequestToken(object):
+class RequestToken:
     def __init__(self, server):
         self.server = server
-class ShareFinder(object):
+class ShareFinder:
     OVERDUE_TIMEOUT = 10.0
     def __init__(self, storage_broker, verifycap, node, download_status,

View File

@@ -26,7 +26,7 @@ class IDownloadStatusHandlingConsumer(Interface):
         """Record the DownloadStatus 'read event', to be updated with the
         time it takes to decrypt each chunk of data."""
-class Cancel(object):
+class Cancel:
     def __init__(self, f):
         self._f = f
         self.active = True
@@ -37,7 +37,7 @@ class Cancel(object):
         self._f(self)
-class DownloadNode(object):
+class DownloadNode:
     """Internal class which manages downloads and holds state. External
     callers use CiphertextFileNode instead."""

View File

@@ -15,7 +15,7 @@ from allmydata.interfaces import DownloadStopped
 from .common import BadSegmentNumberError, WrongSegmentError
 @implementer(IPushProducer)
-class Segmentation(object):
+class Segmentation:
     """I am responsible for a single offset+size read of the file. I handle
     segmentation: I figure out which segments are necessary, request them
     (from my CiphertextDownloader) in order, and trim the segments down to

View File

@@ -27,7 +27,7 @@ class DataUnavailable(Exception):
     pass
-class Share(object):
+class Share:
     """I represent a single instance of a single share (e.g. I reference the
     shnum2 for share SI=abcde on server xy12t, not the one on server ab45q).
     I am associated with a CommonShare that remembers data that is held in
@@ -830,7 +830,7 @@ class Share(object):
             o.notify(state=DEAD, f=f)
-class CommonShare(object):
+class CommonShare:
     # TODO: defer creation of the hashtree until somebody uses us. There will
     # be a lot of unused shares, and we shouldn't spend the memory on a large
     # hashtree unless necessary.

View File

@@ -6,7 +6,7 @@ import itertools
 from zope.interface import implementer
 from allmydata.interfaces import IDownloadStatus
-class ReadEvent(object):
+class ReadEvent:
     def __init__(self, ev, ds):
         self._ev = ev
@@ -22,7 +22,7 @@ class ReadEvent(object):
         self._ds.update_last_timestamp(finishtime)
-class SegmentEvent(object):
+class SegmentEvent:
     def __init__(self, ev, ds):
         self._ev = ev
@@ -47,7 +47,7 @@ class SegmentEvent(object):
         self._ds.update_last_timestamp(when)
-class DYHBEvent(object):
+class DYHBEvent:
     def __init__(self, ev, ds):
         self._ev = ev
@@ -65,7 +65,7 @@ class DYHBEvent(object):
         self._ds.update_last_timestamp(when)
-class BlockRequestEvent(object):
+class BlockRequestEvent:
     def __init__(self, ev, ds):
         self._ev = ev
@@ -84,7 +84,7 @@ class BlockRequestEvent(object):
 @implementer(IDownloadStatus)
-class DownloadStatus(object):
+class DownloadStatus:
     # There is one DownloadStatus for each CiphertextFileNode. The status
     # object will keep track of all activity for that node.
     statusid_counter = itertools.count(0)

View File

@@ -79,7 +79,7 @@ TiB=1024*GiB
 PiB=1024*TiB
 @implementer(IEncoder)
-class Encoder(object):
+class Encoder:
     def __init__(self, log_parent=None, upload_status=None):
         object.__init__(self)

View File

@@ -25,7 +25,7 @@ from allmydata.immutable.downloader.node import DownloadNode, \
      IDownloadStatusHandlingConsumer
 from allmydata.immutable.downloader.status import DownloadStatus
-class CiphertextFileNode(object):
+class CiphertextFileNode:
     def __init__(self, verifycap, storage_broker, secret_holder,
                  terminator, history):
         assert isinstance(verifycap, uri.CHKFileVerifierURI)
@@ -186,7 +186,7 @@ class CiphertextFileNode(object):
         return v.start()
 @implementer(IConsumer, IDownloadStatusHandlingConsumer)
-class DecryptingConsumer(object):
+class DecryptingConsumer:
     """I sit between a CiphertextDownloader (which acts as a Producer) and
     the real Consumer, decrypting everything that passes by. The real
     Consumer sees the real Producer, but the Producer sees us instead of the
@@ -232,7 +232,7 @@ class DecryptingConsumer(object):
         self._consumer.write(plaintext)
 @implementer(IImmutableFileNode)
-class ImmutableFileNode(object):
+class ImmutableFileNode:
     # I wrap a CiphertextFileNode with a decryption key
     def __init__(self, filecap, storage_broker, secret_holder, terminator,

View File

@@ -141,7 +141,7 @@ class _WriteBuffer:
 @implementer(IStorageBucketWriter)
-class WriteBucketProxy(object):
+class WriteBucketProxy:
     """
     Note: The various ``put_`` methods need to be called in the order in which the
     bytes will get written.
@@ -364,7 +364,7 @@ class WriteBucketProxy_v2(WriteBucketProxy):
         self._offset_data = offset_data
 @implementer(IStorageBucketReader)
-class ReadBucketProxy(object):
+class ReadBucketProxy:
     def __init__(self, rref, server, storage_index):
         self._rref = rref

View File

@@ -11,7 +11,7 @@ from allmydata.interfaces import IImmutableFileNode, ICheckable
 from allmydata.uri import LiteralFileURI
-class _ImmutableFileNodeBase(object):
+class _ImmutableFileNodeBase:
     def get_write_uri(self):
         return None

View File

@@ -19,7 +19,7 @@ class NotEnoughWritersError(Exception):
     pass
-class CHKCheckerAndUEBFetcher(object):
+class CHKCheckerAndUEBFetcher:
     """I check to see if a file is already present in the grid. I also fetch
     the URI Extension Block, which is useful for an uploading client who
     wants to avoid the work of encryption and encoding.
@@ -254,7 +254,7 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warn
         self._helper.upload_finished(self._storage_index, 0)
         del self._reader
-class AskUntilSuccessMixin(object):
+class AskUntilSuccessMixin:
     # create me with a _reader array
     _last_failure = None

View File

@@ -183,7 +183,7 @@ class HelperUploadResults(Copyable, RemoteCopy):
         self.pushed_shares = None # count of shares we pushed
 @implementer(IUploadResults)
-class UploadResults(object):
+class UploadResults:
     def __init__(self, file_size,
                  ciphertext_fetched, # how much the helper fetched
@@ -235,7 +235,7 @@ def pretty_print_shnum_to_servers(s):
     return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.items() ])
-class ServerTracker(object):
+class ServerTracker:
     def __init__(self, server,
                  sharesize, blocksize, num_segments, num_share_hashes,
                  storage_index,
@@ -325,7 +325,7 @@ def str_shareloc(shnum, bucketwriter):
 @implementer(IPeerSelector)
-class PeerSelector(object):
+class PeerSelector:
     def __init__(self, num_segments, total_shares, needed_shares, min_happiness):
         self.num_segments = num_segments
@@ -384,7 +384,7 @@ class PeerSelector(object):
         raise NotImplementedError
-class _QueryStatistics(object):
+class _QueryStatistics:
     def __init__(self):
         self.total = 0
@@ -896,7 +896,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
 @attr.s
-class _Accum(object):
+class _Accum:
     """
     Accumulate up to some known amount of ciphertext.
@@ -924,7 +924,7 @@ class _Accum(object):
 @implementer(IEncryptedUploadable)
-class EncryptAnUploadable(object):
+class EncryptAnUploadable:
     """This is a wrapper that takes an IUploadable and provides
     IEncryptedUploadable."""
     CHUNKSIZE = 50*1024
@@ -1159,7 +1159,7 @@ class EncryptAnUploadable(object):
         return self.original.close()
 @implementer(IUploadStatus)
-class UploadStatus(object):
+class UploadStatus:
     statusid_counter = itertools.count(0)
     def __init__(self):
@@ -1208,7 +1208,7 @@ class UploadStatus(object):
     def set_results(self, value):
         self.results = value
-class CHKUploader(object):
+class CHKUploader:
     def __init__(self, storage_broker, secret_holder, reactor=None):
         # server_selector needs storage_broker and secret_holder
@@ -1408,7 +1408,7 @@ def read_this_many_bytes(uploadable, size, prepend_data=None):
     d.addCallback(_got)
     return d
-class LiteralUploader(object):
+class LiteralUploader:
     def __init__(self):
         self._status = s = UploadStatus()
@@ -1525,7 +1525,7 @@ class RemoteEncryptedUploadable(Referenceable): # type: ignore # warner/foolsca
         return self._eu.close()
-class AssistedUploader(object):
+class AssistedUploader:
     def __init__(self, helper, storage_broker):
         self._helper = helper
@@ -1680,7 +1680,7 @@ class AssistedUploader(object):
     def get_upload_status(self):
         return self._upload_status
-class BaseUploadable(object):
+class BaseUploadable:
     # this is overridden by max_segment_size
     default_max_segment_size = DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
     default_params_set = False

View File

@@ -64,7 +64,7 @@ def unsign_from_foolscap(ann_t):
     return (ann, key_vs)
-class SubscriberDescriptor(object):
+class SubscriberDescriptor:
     """This describes a subscriber, for status display purposes. It contains
     the following attributes:
@@ -88,7 +88,7 @@ class SubscriberDescriptor(object):
         self.remote_address = remote_address
         self.tubid = tubid
-class AnnouncementDescriptor(object):
+class AnnouncementDescriptor:
     """This describes an announcement, for status display purposes. It
     contains the following attributes, which will be empty ("" for
     strings) if the client did not provide them:

View File

@@ -87,7 +87,7 @@ class OperationCancelledError(Exception):
 @implementer(IMonitor)
-class Monitor(object):
+class Monitor:
     def __init__(self):
         self.cancelled = False

View File

@@ -13,7 +13,7 @@ from allmydata.mutable.common import MODE_CHECK, MODE_WRITE, CorruptShareError
 from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 from allmydata.mutable.retrieve import Retrieve # for verifying
-class MutableChecker(object):
+class MutableChecker:
     SERVERMAP_MODE = MODE_CHECK
     def __init__(self, node, storage_broker, history, monitor):

View File

@@ -34,7 +34,7 @@ from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
 from allmydata.mutable.repairer import Repairer
-class BackoffAgent(object):
+class BackoffAgent:
     # these parameters are copied from foolscap.reconnector, which gets them
     # from twisted.internet.protocol.ReconnectingClientFactory
     initialDelay = 1.0
@@ -59,7 +59,7 @@ class BackoffAgent(object):
 # use nodemaker.create_mutable_file() to make one of these
 @implementer(IMutableFileNode, ICheckable)
-class MutableFileNode(object):
+class MutableFileNode:
     def __init__(self, storage_broker, secret_holder,
                  default_encoding_parameters, history):
@@ -698,7 +698,7 @@ class MutableFileNode(object):
 @implementer(IMutableFileVersion, IWriteable)
-class MutableFileVersion(object):
+class MutableFileVersion:
     """
     I represent a specific version (most likely the best version) of a
     mutable file.

View File

@@ -222,7 +222,7 @@ def pack_prefix(seqnum, root_hash, IV,
 @implementer(IMutableSlotWriter)
-class SDMFSlotWriteProxy(object):
+class SDMFSlotWriteProxy:
     """
     I represent a remote write slot for an SDMF mutable file. I build a
     share in memory, and then write it in one piece to the remote
@@ -571,7 +571,7 @@ VERIFICATION_KEY_SIZE = 292
 SHARE_HASH_CHAIN_SIZE = (2+HASH_SIZE)*mathutil.log_ceil(256, 2)
 @implementer(IMutableSlotWriter)
-class MDMFSlotWriteProxy(object):
+class MDMFSlotWriteProxy:
     """
     I represent a remote write slot for an MDMF mutable file.
@@ -1187,7 +1187,7 @@ def _handle_bad_struct(f):
     f.trap(struct.error)
     raise BadShareError(f.value.args[0])
-class MDMFSlotReadProxy(object):
+class MDMFSlotReadProxy:
     """
     I read from a mutable slot filled with data written in the MDMF data
     format (which is described above).

View File

@@ -41,7 +41,7 @@ PUSHING_EVERYTHING_ELSE_STATE = 1
 DONE_STATE = 2
 @implementer(IPublishStatus)
-class PublishStatus(object):
+class PublishStatus:
     statusid_counter = count(0)
     def __init__(self):
         self.timings = {}
@@ -112,7 +112,7 @@ class PublishStatus(object):
 class LoopLimitExceededError(Exception):
     pass
-class Publish(object):
+class Publish:
     """I represent a single act of publishing the mutable file to the grid. I
     will only publish my data if the servermap I am using still represents
     the current state of the world.
@@ -1229,7 +1229,7 @@ class Publish(object):
 @implementer(IMutableUploadable)
-class MutableFileHandle(object):
+class MutableFileHandle:
     """
     I am a mutable uploadable built around a filehandle-like object,
     usually either a BytesIO instance or a handle to an actual file.
@@ -1313,7 +1313,7 @@ class MutableData(MutableFileHandle):
 @implementer(IMutableUploadable)
-class TransformingUploadable(object):
+class TransformingUploadable:
     """
     I am an IMutableUploadable that wraps another IMutableUploadable,
     and some segments that are already on the grid. When I am called to

View File

@@ -10,7 +10,7 @@ from allmydata.mutable.common import MODE_REPAIR
 from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 @implementer(IRepairResults)
-class RepairResults(object):
+class RepairResults:
     def __init__(self, smap):
         self.servermap = smap
@@ -27,7 +27,7 @@ class RepairRequiresWritecapError(Exception):
 class MustForceRepairError(Exception):
     pass
-class Repairer(object):
+class Repairer:
     def __init__(self, node, check_results, storage_broker, history, monitor):
         self.node = node
         self.check_results = ICheckResults(check_results)

View File

@@ -29,7 +29,7 @@ from allmydata.mutable.common import CorruptShareError, BadShareError, \
 from allmydata.mutable.layout import MDMFSlotReadProxy
 @implementer(IRetrieveStatus)
-class RetrieveStatus(object):
+class RetrieveStatus:
     statusid_counter = count(0)
     def __init__(self):
         self.timings = {}
@@ -95,11 +95,11 @@ class RetrieveStatus(object):
         serverid = server.get_serverid()
         self._problems[serverid] = f
-class Marker(object):
+class Marker:
     pass
 @implementer(IPushProducer)
-class Retrieve(object):
+class Retrieve:
     # this class is currently single-use. Eventually (in MDMF) we will make
     # it multi-use, in which case you can call download(range) multiple
     # times, and each will have a separate response chain. However the

View File

@@ -25,7 +25,7 @@ from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, \
 from allmydata.mutable.layout import SIGNED_PREFIX_LENGTH, MDMFSlotReadProxy
 @implementer(IServermapUpdaterStatus)
-class UpdateStatus(object):
+class UpdateStatus:
     statusid_counter = count(0)
     def __init__(self):
         self.timings = {}
@@ -86,7 +86,7 @@ class UpdateStatus(object):
     def set_finished(self, when):
         self.finished = when
-class ServerMap(object):
+class ServerMap:
     """I record the placement of mutable shares.
     This object records which shares (of various versions) are located on
@@ -385,7 +385,7 @@ class ServerMap(object):
         self.update_data.setdefault(shnum , []).append((verinfo, data))
-class ServermapUpdater(object):
+class ServermapUpdater:
     def __init__(self, filenode, storage_broker, monitor, servermap,
                  mode=MODE_READ, add_lease=False, update_range=None):
         """I update a servermap, locating a sufficient number of useful

View File

@@ -121,7 +121,7 @@ such as private keys. On Unix-like systems, the permissions on this directory
 are set to disallow users other than its owner from reading the contents of
 the files. See the 'configuration.rst' documentation file for details."""
-class _None(object):
+class _None:
     """
     This class is to be used as a marker in get_config()
     """
@@ -276,7 +276,7 @@ def ensure_text_and_abspath_expanduser_unicode(basedir: Union[bytes, str]) -> st
 @attr.s
-class _Config(object):
+class _Config:
     """
     Manages configuration of a Tahoe 'node directory'.

View File

@@ -22,7 +22,7 @@ from allmydata import uri
 @implementer(INodeMaker)
-class NodeMaker(object):
+class NodeMaker:
     def __init__(self, storage_broker, secret_holder, history,
                  uploader, terminator,

View File

@@ -80,7 +80,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         return None
-class FileResult(object):
+class FileResult:
     def __init__(self, bdb, filecap, should_check,
                  path, mtime, ctime, size):
         self.bdb = bdb
@@ -108,7 +108,7 @@ class FileResult(object):
         self.bdb.did_check_file_healthy(self.filecap, results)
-class DirectoryResult(object):
+class DirectoryResult:
     def __init__(self, bdb, dirhash, dircap, should_check):
         self.bdb = bdb
         self.dircap = dircap
@@ -130,7 +130,7 @@ class DirectoryResult(object):
         self.bdb.did_check_directory_healthy(self.dircap, results)
-class BackupDB_v2(object):
+class BackupDB_v2:
     VERSION = 2
     NO_CHECK_BEFORE = 1*MONTH
     ALWAYS_CHECK_AFTER = 2*MONTH

View File

@@ -185,7 +185,7 @@ def get_aliases(nodedir):
             pass
     return aliases
-class DefaultAliasMarker(object):
+class DefaultAliasMarker:
     pass
 pretend_platform_uses_lettercolon = False # for tests

View File

@@ -31,7 +31,7 @@ def parse_url(url, defaultPort=None):
         path = "/"
     return scheme, host, port, path
-class BadResponse(object):
+class BadResponse:
     def __init__(self, url, err):
         self.status = -1
         self.reason = "Error trying to connect to %s: %s" % (url, err)

View File

@@ -13,7 +13,7 @@ from allmydata.util.encodingutil import quote_output, is_printable_ascii
 from urllib.parse import quote as url_quote
 import json
-class SlowOperationRunner(object):
+class SlowOperationRunner:
     def run(self, options):
         stderr = options.stderr

View File

@@ -58,7 +58,7 @@ def put_child(dirurl, childname, childcap):
     if resp.status not in (200, 201):
         raise HTTPError("Error during put_child", resp)
-class BackerUpper(object):
+class BackerUpper:
     """
     :ivar int _files_checked: The number of files which the backup process has
         so-far inspected on the grid to determine if they need to be
@@ -332,7 +332,7 @@ def run_backup(
     return progress.backup_finished()
-class FileTarget(object):
+class FileTarget:
     def __init__(self, path):
         self._path = path
@@ -352,7 +352,7 @@ class FileTarget(object):
         return progress.reused_file(self._path, childcap, metadata)
-class DirectoryTarget(object):
+class DirectoryTarget:
     def __init__(self, path):
         self._path = path
@@ -368,7 +368,7 @@ class DirectoryTarget(object):
         return progress.reused_directory(self._path, dircap, metadata)
-class _ErrorTarget(object):
+class _ErrorTarget:
     def __init__(self, path, isdir=False):
         self._path = path
         self._quoted_path = quote_local_unicode_path(path)
@@ -403,7 +403,7 @@ class SpecialTarget(_ErrorTarget):
     )
-class BackupComplete(object):
+class BackupComplete:
     def __init__(self,
                  start_timestamp,
                  end_timestamp,
@@ -462,7 +462,7 @@ class BackupComplete(object):
         return "\n".join(result)
-class BackupProgress(object):
+class BackupProgress:
     # Would be nice if this data structure were immutable and its methods were
     # transformations that created a new slightly different object. Not there
     # yet, though.

View File

@@ -14,7 +14,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path, get_io_encoding
-class Checker(object):
+class Checker:
     pass
 def _quote_serverid_index_share(serverid, storage_index, sharenum):
@@ -119,7 +119,7 @@ def check(options):
         return errno
     return 0
-class FakeTransport(object):
+class FakeTransport:
     disconnecting = False
 class DeepCheckOutput(LineOnlyReceiver, object):

View File

@@ -73,7 +73,7 @@ def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
         raise HTTPError("Error during mkdir", resp)
-class LocalFileSource(object):
+class LocalFileSource:
     def __init__(self, pathname, basename):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -88,7 +88,7 @@ class LocalFileSource(object):
     def open(self, caps_only):
         return open(self.pathname, "rb")
-class LocalFileTarget(object):
+class LocalFileTarget:
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -96,7 +96,7 @@ class LocalFileTarget(object):
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)
-class LocalMissingTarget(object):
+class LocalMissingTarget:
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -104,7 +104,7 @@ class LocalMissingTarget(object):
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)
-class LocalDirectorySource(object):
+class LocalDirectorySource:
     def __init__(self, progressfunc, pathname, basename):
         precondition_abspath(pathname)
@@ -136,7 +136,7 @@ class LocalDirectorySource(object):
                 # TODO: output a warning
                 pass
-class LocalDirectoryTarget(object):
+class LocalDirectoryTarget:
     def __init__(self, progressfunc, pathname):
         precondition_abspath(pathname)
@@ -183,7 +183,7 @@ class LocalDirectoryTarget(object):
         pass
-class TahoeFileSource(object):
+class TahoeFileSource:
     def __init__(self, nodeurl, mutable, writecap, readcap, basename):
         self.nodeurl = nodeurl
         self.mutable = mutable
@@ -216,7 +216,7 @@ def seekable(file_like):
     )
-class TahoeFileTarget(object):
+class TahoeFileTarget:
     def __init__(self, nodeurl, mutable, writecap, readcap, url):
         self.nodeurl = nodeurl
         self.mutable = mutable
@@ -236,7 +236,7 @@ class TahoeFileTarget(object):
 # to always create mutable files, or to copy mutable files into new
 # mutable files. ticket #835
-class TahoeDirectorySource(object):
+class TahoeDirectorySource:
     def __init__(self, nodeurl, cache, progressfunc, basename):
         self.nodeurl = nodeurl
         self.cache = cache
@@ -309,7 +309,7 @@ class TahoeDirectorySource(object):
                 "You probably need to use a later version of "
                 "Tahoe-LAFS to copy this directory.")
-class TahoeMissingTarget(object):
+class TahoeMissingTarget:
     def __init__(self, url):
         self.url = url
@@ -326,7 +326,7 @@ class TahoeMissingTarget(object):
         # I'm not sure this will always work
         return PUT(self.url + "?t=uri", filecap)
-class TahoeDirectoryTarget(object):
+class TahoeDirectoryTarget:
     def __init__(self, nodeurl, cache, progressfunc):
         self.nodeurl = nodeurl
         self.cache = cache
@@ -470,7 +470,7 @@ FileTargets = (LocalFileTarget, TahoeFileTarget)
 DirectoryTargets = (LocalDirectoryTarget, TahoeDirectoryTarget)
 MissingTargets = (LocalMissingTarget, TahoeMissingTarget)
-class Copier(object):
+class Copier:
     def do_copy(self, options, progressfunc=None):
         if options['quiet']:

View File

@@ -12,7 +12,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path
-class FakeTransport(object):
+class FakeTransport:
     disconnecting = False
 class ManifestStreamer(LineOnlyReceiver, object):

View File

@@ -225,7 +225,7 @@ class DaemonizeTheRealService(Service, HookMixin):
         reactor.callWhenRunning(start)
-class DaemonizeTahoeNodePlugin(object):
+class DaemonizeTahoeNodePlugin:
     tapname = "tahoenode"
     def __init__(self, nodetype, basedir, allow_stdin_close):
         self.nodetype = nodetype

View File

@@ -150,7 +150,7 @@ def _dump_json_to_file(js, afile):
         f.write(data.encode("utf8"))
-class _LeaseStateSerializer(object):
+class _LeaseStateSerializer:
     """
     Read and write state for LeaseCheckingCrawler. This understands
     how to read the legacy pickle format files and upgrade them to the

View File

@@ -30,7 +30,7 @@ def _convert_pickle_state_to_json(state):
     }
-class _HistorySerializer(object):
+class _HistorySerializer:
     """
     Serialize the 'history' file of the lease-crawler state. This is
     "storage/lease_checker.history" for the pickle or

View File

@@ -215,7 +215,7 @@ def limited_content(
 @define
-class ImmutableCreateResult(object):
+class ImmutableCreateResult:
     """Result of creating a storage index for an immutable."""
     already_have: set[int]
@@ -422,7 +422,7 @@ class StorageClientFactory:
 @define(hash=True)
-class StorageClient(object):
+class StorageClient:
     """
     Low-level HTTP client that talks to the HTTP storage server.
@@ -580,7 +580,7 @@ class StorageClient(object):
 @define(hash=True)
-class StorageClientGeneral(object):
+class StorageClientGeneral:
     """
     High-level HTTP APIs that aren't immutable- or mutable-specific.
     """
@@ -659,7 +659,7 @@ class StorageClientGeneral(object):
 @define
-class UploadProgress(object):
+class UploadProgress:
     """
     Progress of immutable upload, per the server.
     """
@@ -780,7 +780,7 @@ async def advise_corrupt_share(
 @define(hash=True)
-class StorageClientImmutables(object):
+class StorageClientImmutables:
     """
     APIs for interacting with immutables.
     """

View File

@@ -251,7 +251,7 @@ def _authorized_route(
 @define
-class StorageIndexUploads(object):
+class StorageIndexUploads:
     """
     In-progress upload to storage index.
     """
@@ -265,7 +265,7 @@ class StorageIndexUploads(object):
 @define
-class UploadsInProgress(object):
+class UploadsInProgress:
     """
     Keep track of uploads for storage indexes.
     """
@@ -1072,7 +1072,7 @@ class HTTPServer(BaseApp):
 @implementer(IStreamServerEndpoint)
 @define
-class _TLSEndpointWrapper(object):
+class _TLSEndpointWrapper:
     """
     Wrap an existing endpoint with the server-side storage TLS policy. This is
     useful because not all Tahoe-LAFS endpoints might be plain TCP+TLS, for

View File

@@ -103,7 +103,7 @@ def _fix_lease_count_format(lease_count_format):
     return fixed

-class ShareFile(object):
+class ShareFile:
     """
     Support interaction with persistent storage of a share.
@@ -351,7 +351,7 @@ class ShareFile(object):
         return space_freed

-class BucketWriter(object):
+class BucketWriter:
     """
     Keep track of the process of writing to a ShareFile.
     """
@@ -518,7 +518,7 @@ class FoolscapBucketWriter(Referenceable): # type: ignore # warner/foolscap#78
         return self._bucket_writer.abort()

-class BucketReader(object):
+class BucketReader:
     """
     Manage the process for reading from a ``ShareFile``.
     """

View File

@@ -12,7 +12,7 @@ from .lease_schema import (
 )

 @attr.s(frozen=True)
-class _Schema(object):
+class _Schema:
     """
     Implement encoding and decoding for multiple versions of the immutable
     container schema.

View File

@@ -115,7 +115,7 @@ class ILeaseInfo(Interface):
 @implementer(ILeaseInfo)
 @attr.s(frozen=True)
-class LeaseInfo(object):
+class LeaseInfo:
     """
     Represent the details of one lease, a marker which is intended to inform
     the storage server how long to store a particular share.
@@ -369,7 +369,7 @@ class HashedLeaseInfo(proxyForInterface(ILeaseInfo, "_lease_info")): # type: ignore
 @attr.s(frozen=True)
-class _HashedCancelSecret(object):
+class _HashedCancelSecret:
     """
     ``_HashedCancelSecret`` is a marker type for an already-hashed lease
     cancel secret that lets internal lease cancellers bypass the hash-based

View File

@@ -15,7 +15,7 @@ from .lease import (
 )

 @attr.s(frozen=True)
-class CleartextLeaseSerializer(object):
+class CleartextLeaseSerializer:
     """
     Serialize and unserialize leases with cleartext secrets.
     """
@@ -46,7 +46,7 @@ class CleartextLeaseSerializer(object):
         return self._from_data(data)

 @attr.s(frozen=True)
-class HashedLeaseSerializer(object):
+class HashedLeaseSerializer:
     _to_data = attr.ib()
     _from_data = attr.ib()

View File

@@ -45,7 +45,7 @@ from .mutable_schema import (
 assert struct.calcsize(">L") == 4, struct.calcsize(">L")
 assert struct.calcsize(">Q") == 8, struct.calcsize(">Q")

-class MutableShareFile(object):
+class MutableShareFile:
     sharetype = "mutable"
     DATA_LENGTH_OFFSET = struct.calcsize(">32s20s32s")
@@ -457,7 +457,7 @@ def testv_compare(a, op, b):
     return a == b

-class EmptyShare(object):
+class EmptyShare:
     def check_testv(self, testv):
         test_good = True

View File

@@ -89,7 +89,7 @@ _EXTRA_LEASE_OFFSET = _HEADER_SIZE + 4 * LeaseInfo().mutable_size()
 @attr.s(frozen=True)
-class _Schema(object):
+class _Schema:
     """
     Implement encoding and decoding for the mutable container.

View File

@@ -121,7 +121,7 @@ ANONYMOUS_STORAGE_NURLS = "anonymous-storage-NURLs"
 # don't pass signatures: only pass validated blessed-objects

 @attr.s
-class StorageClientConfig(object):
+class StorageClientConfig:
     """
     Configuration for a node acting as a storage client.
@@ -578,7 +578,7 @@ class StorageFarmBroker(service.MultiService):
         return StubServer(serverid)

 @implementer(IDisplayableServer)
-class StubServer(object):
+class StubServer:
     def __init__(self, serverid):
         assert isinstance(serverid, bytes)
         self.serverid = serverid # binary tubid
@@ -680,7 +680,7 @@ def _parse_announcement(server_id: bytes, furl: bytes, ann: dict) -> tuple[str,
 @implementer(IFoolscapStorageServer)
 @attr.s(frozen=True)
-class _FoolscapStorage(object):
+class _FoolscapStorage:
     """
     Abstraction for connecting to a storage server exposed via Foolscap.
     """
@@ -739,7 +739,7 @@ class _FoolscapStorage(object):
 @implementer(IFoolscapStorageServer)
 @define
-class _NullStorage(object):
+class _NullStorage:
     """
     Abstraction for *not* communicating with a storage server of a type with
     which we can't communicate.
@@ -758,7 +758,7 @@ class _NullStorage(object):
         return NonReconnector()

-class NonReconnector(object):
+class NonReconnector:
     """
     A ``foolscap.reconnector.Reconnector``-alike that doesn't do anything.
     """
@@ -1379,7 +1379,7 @@ class UnknownServerTypeError(Exception):
 @implementer(IStorageServer)
 @attr.s
-class _StorageServer(object):
+class _StorageServer:
     """
     ``_StorageServer`` is a direct pass-through to an ``RIStorageServer`` via
     a ``RemoteReference``.
@@ -1490,7 +1490,7 @@ class _StorageServer(object):
 @attr.s(hash=True)
-class _FakeRemoteReference(object):
+class _FakeRemoteReference:
     """
     Emulate a Foolscap RemoteReference, calling a local object instead.
     """
@@ -1506,7 +1506,7 @@ class _FakeRemoteReference(object):
 @attr.s
-class _HTTPBucketWriter(object):
+class _HTTPBucketWriter:
     """
     Emulate a ``RIBucketWriter``, but use HTTP protocol underneath.
     """
@@ -1547,7 +1547,7 @@ def _ignore_404(failure: Failure) -> Optional[Failure]:
 @attr.s(hash=True)
-class _HTTPBucketReader(object):
+class _HTTPBucketReader:
     """
     Emulate a ``RIBucketReader``, but use HTTP protocol underneath.
     """
@@ -1570,7 +1570,7 @@ class _HTTPBucketReader(object):
 # WORK IN PROGRESS, for now it doesn't actually implement whole thing.
 @implementer(IStorageServer) # type: ignore
 @attr.s
-class _HTTPStorageServer(object):
+class _HTTPStorageServer:
     """
     Talk to remote storage server over HTTP.
     """

View File

@@ -522,7 +522,7 @@ class CLI(CLITestMixin, unittest.TestCase):
         self.basedir = "cli/exception_catcher"
         exc = Exception("canary")

-        class BrokenOptions(object):
+        class BrokenOptions:
             def parseOptions(self, argv):
                 raise exc

View File

@@ -115,7 +115,7 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         original_do_http = tahoe_mv.do_http
         def mock_do_http(method, url, body=b""):
             if method == "DELETE":
-                class FakeResponse(object):
+                class FakeResponse:
                     def read(self):
                         return "response"
                 resp = FakeResponse()

View File

@@ -30,7 +30,7 @@ from ..common_web import do_http
 from .common import CLITestMixin

-class FakeStatus(object):
+class FakeStatus:
     def __init__(self):
         self.status = []

View File

@@ -52,7 +52,7 @@ RelayURL = str
 ApplicationKey = Tuple[RelayURL, AppId]

 @define
-class MemoryWormholeServer(object):
+class MemoryWormholeServer:
     """
     A factory for in-memory wormholes.
@@ -105,7 +105,7 @@ class MemoryWormholeServer(object):
 @frozen
-class TestingHelper(object):
+class TestingHelper:
     """
     Provide extra functionality for interacting with an in-memory wormhole
     implementation.
@@ -160,7 +160,7 @@ _verify()
 @define
-class _WormholeApp(object):
+class _WormholeApp:
     """
     Represent a collection of wormholes that belong to the same
     appid/relay_url scope.
@@ -204,7 +204,7 @@ class _WormholeApp(object):
 @frozen
-class _WormholeServerView(object):
+class _WormholeServerView:
     """
     Present an interface onto the server to be consumed by individual
     wormholes.
@@ -235,7 +235,7 @@ class _WormholeServerView(object):
 @implementer(IWormhole)
 @define
-class _MemoryWormhole(object):
+class _MemoryWormhole:
     """
     Represent one side of a wormhole as conceived by ``MemoryWormholeServer``.
     """

View File

@@ -119,7 +119,7 @@ def on_different(fd_mapping):
     return _ProcessProtocolAdapter(fd_mapping)

 @attr.s
-class CLINodeAPI(object):
+class CLINodeAPI:
     reactor = attr.ib()
     basedir = attr.ib(type=FilePath)
     process = attr.ib(default=None)

View File

@@ -129,7 +129,7 @@ def byteschr(x):
     return bytes([x])

 @attr.s
-class FakeDisk(object):
+class FakeDisk:
     """
     Just enough of a disk to be able to report free / used information.
     """
@@ -166,7 +166,7 @@ class FakeDisk(object):
 @attr.s
-class MemoryIntroducerClient(object):
+class MemoryIntroducerClient:
     """
     A model-only (no behavior) stand-in for ``IntroducerClient``.
     """
@@ -199,7 +199,7 @@ class MemoryIntroducerClient(object):
 @attr.s
-class Subscription(object):
+class Subscription:
     """
     A model of an introducer subscription.
     """
@@ -210,7 +210,7 @@ class Subscription(object):
 @attr.s
-class Announcement(object):
+class Announcement:
     """
     A model of an introducer announcement.
     """
@@ -237,7 +237,7 @@ def get_published_announcements(client):
     )

-class UseTestPlugins(object):
+class UseTestPlugins:
     """
     A fixture which enables loading Twisted plugins from the Tahoe-LAFS test
     suite.
@@ -265,7 +265,7 @@ class UseTestPlugins(object):
 @attr.s
-class UseNode(object):
+class UseNode:
     """
     A fixture which creates a client node.
@@ -374,7 +374,7 @@ class UseNode(object):
 @implementer(IPlugin, IStreamServerEndpointStringParser)
-class AdoptedServerPort(object):
+class AdoptedServerPort:
     """
     Parse an ``adopt-socket:<fd>`` endpoint description by adopting ``fd`` as
     a listening TCP port.
@@ -414,7 +414,7 @@ def really_bind(s, addr):
     raise Exception("Many bind attempts failed with EADDRINUSE")

-class SameProcessStreamEndpointAssigner(object):
+class SameProcessStreamEndpointAssigner:
     """
     A fixture which can assign streaming server endpoints for use *in this
     process only*.
@@ -479,7 +479,7 @@ class SameProcessStreamEndpointAssigner(object):
         return location_hint, port_endpoint

 @implementer(IPullProducer)
-class DummyProducer(object):
+class DummyProducer:
     def resumeProducing(self):
         pass
@@ -916,7 +916,7 @@ class LoggingServiceParent(service.MultiService):
 TEST_DATA=b"\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)

-class WebErrorMixin(object):
+class WebErrorMixin:
     def explain_web_error(self, f):
         # an error on the server side causes the client-side getPage() to
         # return a failure(t.web.error.Error), and its str() doesn't show the
@@ -1276,7 +1276,7 @@ def _corrupt_uri_extension(data, debug=False):
 @attr.s
 @implementer(IAddressFamily)
-class ConstantAddresses(object):
+class ConstantAddresses:
     """
     Pretend to provide support for some address family but just hand out
     canned responses.
@@ -1318,7 +1318,7 @@ def disable_modules(*names):
         else:
             sys.modules[n] = original

-class _TestCaseMixin(object):
+class _TestCaseMixin:
     """
     A mixin for ``TestCase`` which collects helpful behaviors for subclasses.

View File

@@ -226,7 +226,7 @@ def flip_one_bit(s, offset=0, size=None):
     return result

-class ReallyEqualMixin(object):
+class ReallyEqualMixin:
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
         if a.__class__ == str:
@@ -236,7 +236,7 @@ class ReallyEqualMixin(object):
         self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))

-class SignalMixin(object):
+class SignalMixin:
     # This class is necessary for any code which wants to use Processes
     # outside the usual reactor.run() environment. It is copied from
     # Twisted's twisted.test.test_process . Note that Twisted-8.2.0 uses
@@ -258,17 +258,17 @@ class SignalMixin(object):
         return super(SignalMixin, self).tearDown()

-class StallMixin(object):
+class StallMixin:
     def stall(self, res=None, delay=1):
         d = defer.Deferred()
         reactor.callLater(delay, d.callback, res)
         return d

-class Marker(object):
+class Marker:
     pass

-class FakeCanary(object):
+class FakeCanary:
     """For use in storage tests.
     """
     def __init__(self, ignore_disconnectors=False):
@@ -300,7 +300,7 @@ class FakeCanary(object):
         self.disconnectors = None

-class ShouldFailMixin(object):
+class ShouldFailMixin:
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
@@ -392,7 +392,7 @@ class TestMixin(SignalMixin):
         self.fail("Reactor was still active when it was required to be quiescent.")

-class TimezoneMixin(object):
+class TimezoneMixin:
     def setTimezone(self, timezone):
         def tzset_if_possible():

View File

@@ -57,7 +57,7 @@ RUN_TEST = ActionType(
 @attr.s
-class EliotLoggedRunTest(object):
+class EliotLoggedRunTest:
     """
     A *RunTest* implementation which surrounds test invocation with an
     Eliot-based action.
@@ -162,7 +162,7 @@ def with_logging(
 @implementer(ILogger)
-class _TwoLoggers(object):
+class _TwoLoggers:
     """
     Log to two loggers.

View File

@@ -33,7 +33,7 @@ from allmydata.crypto import (
 )

 @attr.s
-class MatchesNodePublicKey(object):
+class MatchesNodePublicKey:
     """
     Match an object representing the node's private key.
@@ -124,7 +124,7 @@ def matches_base32():
-class MatchesSameElements(object):
+class MatchesSameElements:
     """
     Match if the two-tuple value given contains two elements that are equal to
     each other.

View File

@@ -22,14 +22,14 @@ from ..no_network import GridTestMixin
 from .. import common_util as testutil
 from ..common_util import DevNullDictionary

-class SameKeyGenerator(object):
+class SameKeyGenerator:
     def __init__(self, pubkey, privkey):
         self.pubkey = pubkey
         self.privkey = privkey
     def generate(self, keysize=None):
         return defer.succeed( (self.pubkey, self.privkey) )

-class FirstServerGetsKilled(object):
+class FirstServerGetsKilled:
     done = False
     def notify(self, retval, wrapper, methname):
         if not self.done:
@@ -37,7 +37,7 @@ class FirstServerGetsKilled(object):
             self.done = True
         return retval

-class FirstServerGetsDeleted(object):
+class FirstServerGetsDeleted:
     def __init__(self):
         self.done = False
         self.silenced = None

View File

@@ -32,7 +32,7 @@ def eventuaaaaaly(res=None):
 # network connections, both to speed up the tests and to reduce the amount of
 # non-mutable.py code being exercised.

-class FakeStorage(object):
+class FakeStorage:
     # this class replaces the collection of storage servers, allowing the
     # tests to examine and manipulate the published shares. It also lets us
     # control the order in which read queries are answered, to exercise more
@@ -92,7 +92,7 @@ class FakeStorage(object):
 # This doesn't actually implement the whole interface, but adding a commented
 # interface implementation annotation for grepping purposes.
 #@implementer(RIStorageServer)
-class FakeStorageServer(object):
+class FakeStorageServer:
     """
     A fake Foolscap remote object, implemented by overriding callRemote() to
     call local methods.
@@ -217,7 +217,7 @@ def corrupt(res, s, offset, shnums_to_corrupt=None, offset_offset=0):
     return dl

 @attr.s
-class Peer(object):
+class Peer:
     peerid = attr.ib()
     storage_server = attr.ib()
     announcement = attr.ib()
@@ -318,7 +318,7 @@ def make_nodemaker_with_storage_broker(storage_broker):
     return nodemaker

-class PublishMixin(object):
+class PublishMixin:
     def publish_one(self):
         # publish a file and create shares, which can then be manipulated
         # later.
@@ -428,7 +428,7 @@ class PublishMixin(object):
                 index = versionmap[shnum]
                 shares[peerid][shnum] = oldshares[index][peerid][shnum]

-class CheckerMixin(object):
+class CheckerMixin:
     def check_good(self, r, where):
         self.failUnless(r.is_healthy(), where)
         return r

View File

@@ -57,13 +57,13 @@ from .common import (
 class IntentionalError(Exception):
     pass

-class Marker(object):
+class Marker:
     pass

 fireNow = partial(defer.succeed, None)

 @implementer(IRemoteReference) # type: ignore # warner/foolscap#79
-class LocalWrapper(object):
+class LocalWrapper:
     """
     A ``LocalWrapper`` presents the remote reference interface to a local
     object which implements a ``RemoteInterface``.
@@ -171,7 +171,7 @@ def wrap_storage_server(original):
     return wrapper

 @implementer(IServer)
-class NoNetworkServer(object):
+class NoNetworkServer:
     def __init__(self, serverid, rref):
         self.serverid = serverid
         self.rref = rref
@@ -293,7 +293,7 @@ class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573
     #._servers will be set by the NoNetworkGrid which creates us

-class SimpleStats(object):
+class SimpleStats:
     def __init__(self):
         self.counters = {}
         self.stats_producers = []
@@ -463,7 +463,7 @@ class NoNetworkGrid(service.MultiService):
         fileutil.rm_dir(os.path.join(server.sharedir, prefixdir))

-class GridTestMixin(object):
+class GridTestMixin:
     def setUp(self):
         self.s = service.MultiService()
         self.s.startService()

View File

@@ -52,7 +52,7 @@ class RIDummy(RemoteInterface):
 # https://twistedmatrix.com/trac/ticket/9717
 @implementer(IFoolscapStoragePlugin) # type: ignore
 @attr.s
-class DummyStorage(object):
+class DummyStorage:
     name = attr.ib()

     @property

View File

@@ -46,7 +46,7 @@ from .web.common import (
     assert_soup_has_tag_with_content,
 )

-class FakeClient(object):
+class FakeClient:
     def get_storage_broker(self):
         return self.storage_broker
@@ -781,7 +781,7 @@ class AddLease(GridTestMixin, unittest.TestCase):
         d.addCallback(lambda ign: self.failUnless(really_did_break))
         return d

-class CounterHolder(object):
+class CounterHolder:
     def __init__(self):
         self._num_active_block_fetches = 0
         self._max_active_block_fetches = 0

View File

@@ -19,7 +19,7 @@ from testtools.matchers import (
 @implementer(IPushProducer)
 @implementer(IPullProducer)
-class Producer(object):
+class Producer:
     """Can be used as either streaming or non-streaming producer.

     If used as streaming, the test should call iterate() manually.

View File

@@ -47,7 +47,7 @@ from hypothesis.strategies import text

 @implementer(IConsumer)
-class MemAccum(object):
+class MemAccum:
     def registerProducer(self, producer, streaming):
         self.producer = producer
         self.producer.resumeProducing()
@@ -1395,7 +1395,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
         self.set_up_grid(oneshare=True)
         return self._do_initial_children_test(mdmf=True)

-class MinimalFakeMutableFile(object):
+class MinimalFakeMutableFile:
     def get_writekey(self):
         return b"writekey"

View File

@@ -1347,7 +1347,7 @@ def make_servers(clientids):
         servers[clientid] = make_server(clientid)
     return servers

-class MyShare(object):
+class MyShare:
     def __init__(self, shnum, server, rtt):
         self._shnum = shnum
         self._server = server
@@ -1363,7 +1363,7 @@ class MySegmentFetcher(SegmentFetcher):
     def _start_share(self, share, shnum):
         self._test_start_shares.append(share)

-class FakeNode(object):
+class FakeNode:
     def __init__(self):
         self.want_more = 0
         self.failed = None

View File

@@ -99,7 +99,7 @@ class EliotLoggedTestTests(TestCase):
         A test method of an ``AsyncTestCase`` subclass that logs an unserializable
         value with Eliot fails.
         """
-        class world(object):
+        class world:
            """
            an unserializable object
            """

View File

@@ -25,7 +25,7 @@ def flip_bit(good): # flips the last bit
     return good[:-1] + byteschr(ord(good[-1]) ^ 0x01)

 @implementer(IStorageBucketWriter, IStorageBucketReader)
-class FakeBucketReaderWriterProxy(object):
+class FakeBucketReaderWriterProxy:
     # these are used for both reading and writing
     def __init__(self, mode="good", peerid="peer"):
         self.mode = mode

View File

@@ -70,7 +70,7 @@ from allmydata.util.encodingutil import unicode_to_url, \
     to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
     unicode_to_argv

-class MockStdout(object):
+class MockStdout:
     pass

View File

@@ -11,10 +11,10 @@ from allmydata.mutable.filenode import MutableFileNode
 from allmydata.util import hashutil
 from allmydata.util.consumer import download_to_data

-class NotANode(object):
+class NotANode:
     pass

-class FakeClient(object):
+class FakeClient:
     # just enough to let the node acquire a downloader (which it won't use),
     # and to get default encoding parameters
     def getServiceNamed(self, name):

View File

@@ -282,7 +282,7 @@ class PlacementTests(unittest.TestCase):
         assert happiness == min(len(peers), len(shares))

-class FakeServerTracker(object):
+class FakeServerTracker:
     def __init__(self, serverid, buckets):
         self._serverid = serverid
         self.buckets = buckets

View File

@@ -96,7 +96,7 @@ class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
         return d

 @attr.s
-class FakeCHKCheckerAndUEBFetcher(object):
+class FakeCHKCheckerAndUEBFetcher:
     """
     A fake of ``CHKCheckerAndUEBFetcher`` which hard-codes some check result.
     """

View File

@@ -24,11 +24,11 @@ from .no_network import (
     NoNetworkServer,
 )

-class MockShareHashTree(object):
+class MockShareHashTree:
     def needed_hashes(self):
         return False

-class MockNode(object):
+class MockNode:
     def __init__(self, check_reneging, check_fetch_failed):
         self.got = 0
         self.finished_d = defer.Deferred()
@@ -86,10 +86,10 @@ class TestShareFinder(unittest.TestCase):
         rcap = uri.CHKFileURI(b'a'*32, b'a'*32, 3, 99, 100)
         vcap = rcap.get_verify_cap()

-        class MockBuckets(object):
+        class MockBuckets:
             pass

-        class MockServer(object):
+        class MockServer:
             def __init__(self, buckets):
                 self.version = {
                     b'http://allmydata.org/tahoe/protocols/storage/v1': {
@@ -113,17 +113,17 @@ class TestShareFinder(unittest.TestCase):
             eventually(_give_buckets_and_hunger_again)
             return d

-        class MockStorageBroker(object):
+        class MockStorageBroker:
             def __init__(self, servers):
                 self.servers = servers
             def get_servers_for_psi(self, si):
                 return self.servers

-        class MockDownloadStatus(object):
+        class MockDownloadStatus:
             def add_dyhb_request(self, server, when):
                 return MockDYHBEvent()

-        class MockDYHBEvent(object):
+        class MockDYHBEvent:
             def finished(self, shnums, when):
                 pass

View File

@@ -146,7 +146,7 @@ class Node(testutil.SignalMixin, testutil.ReallyEqualMixin, AsyncTestCase):
         self.failUnlessReallyEqual(w.staticdir, expected)

-class ServiceMixin(object):
+class ServiceMixin:
     def setUp(self):
         self.parent = LoggingMultiService()
         self.parent.startService()
@@ -732,7 +732,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
         os.makedirs(self.basedir)
         return self.do_system_test()

-class FakeRemoteReference(object):
+class FakeRemoteReference:
     def notifyOnDisconnect(self, *args, **kwargs): pass
     def getRemoteTubID(self): return "62ubehyunnyhzs7r6vdonnm2hpi52w6y"
     def getPeer(self): return address.IPv4Address("TCP", "remote.example.com",

View File

@@ -50,7 +50,7 @@ def new_secret():
     return _randbytes(32)

-class IStorageServerSharedAPIsTestsMixin(object):
+class IStorageServerSharedAPIsTestsMixin:
     """
     Tests for ``IStorageServer``'s shared APIs.
@@ -68,7 +68,7 @@ class IStorageServerSharedAPIsTestsMixin(object):
         self.assertIn(b"http://allmydata.org/tahoe/protocols/storage/v1", result)

-class IStorageServerImmutableAPIsTestsMixin(object):
+class IStorageServerImmutableAPIsTestsMixin:
     """
     Tests for ``IStorageServer``'s immutable APIs.
@@ -515,7 +515,7 @@ class IStorageServerImmutableAPIsTestsMixin(object):
         self.assertEqual(lease2.get_expiration_time() - initial_expiration_time, 167)

-class IStorageServerMutableAPIsTestsMixin(object):
+class IStorageServerMutableAPIsTestsMixin:
     """
     Tests for ``IStorageServer``'s mutable APIs.

View File

@@ -7,7 +7,7 @@ from twisted.trial.unittest import TestCase
 from allmydata.web.common import get_filenode_metadata, SDMF_VERSION, MDMF_VERSION

-class MockFileNode(object):
+class MockFileNode:
     def __init__(self, size, mutable_version=None):
         self.size = size
         self.mutable_version = mutable_version
@@ -24,7 +24,7 @@ class MockFileNode(object):
         return self.mutable_version

-class CommonFixture(object):
+class CommonFixture:
     def test_size_is_0(self):
         """If get_size doesn't return None the returned metadata must contain "size"."""
         mockfilenode = MockFileNode(0, mutable_version=self.mutable_version)

View File

@@ -159,7 +159,7 @@ class TestCase(testutil.SignalMixin, unittest.TestCase):
         tub = testing_tub(reactor, config_data)

-        class Foo(object):
+        class Foo:
             pass

         furl = tub.registerReference(Foo())
@@ -831,7 +831,7 @@ ENABLE_HELPER = """
 enabled = true
 """

-class FakeTub(object):
+class FakeTub:
     def __init__(self):
         self.tubID = base64.b32encode(b"foo")
         self.listening_ports = []

View File

@@ -26,7 +26,7 @@ from allmydata.web.status import Statistics
 from allmydata.test.common import SyncTestCase

-class FakeStatsProvider(object):
+class FakeStatsProvider:
     """
     A stats provider that hands back a canned collection of performance
     statistics.

View File

@@ -21,7 +21,7 @@ MAX_DELTA_READS = 10 * READ_LEEWAY # N = 10
 timeout=240 # François's ARM box timed out after 120 seconds of Verifier.test_corrupt_crypttext_hashtree

-class RepairTestMixin(object):
+class RepairTestMixin:
     def _count_reads(self):
         sum_of_read_counts = 0
         for (i, ss, storedir) in self.iterate_servers():

View File

@@ -1391,7 +1391,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCase):
         return d

     def test_execCommand_and_openShell(self):
-        class MockProtocol(object):
+        class MockProtocol:
             def __init__(self):
                 self.output = ""
                 self.error = ""

Some files were not shown because too many files have changed in this diff
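Every hunk in this diff applies the same mechanical transformation: the explicit ``object`` base class, which Python 2 needed to get a new-style class, is dropped because Python 3 makes it implicit. A minimal sketch of the equivalence (illustrative only; the class names ``Old`` and ``New`` are hypothetical and do not appear in the diff):

    # Illustrative only: under Python 3 both spellings define the same
    # kind of class, so dropping "(object)" changes no behavior.
    class Old(object):   # redundant Python 2 spelling
        pass

    class New:           # cleaned-up spelling used throughout this commit
        pass

    # Both classes still have object at the root of their MRO.
    assert Old.__mro__[-1] is object
    assert New.__mro__[-1] is object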