Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-04-05 01:49:15 +00:00)
Merge pull request #613 from tpltnt/new-style-classes
New style classes
This commit is contained in: commit 11b94c03cf
Changed files:

misc/
    coding_tools/
    operations_helpers/provisioning/
    simulators/
newsfragments/
src/allmydata/
    blacklist.py, check_results.py, client.py, control.py, dirnode.py
    frontends/
    hashtree.py, history.py
    immutable/
    introducer/
    mutable/
    scripts/
    storage/
    test/
        bench_dirnode.py, check_grid.py, check_speed.py
        cli/
        common.py, common_util.py, common_web.py, eliotutil.py
        mutable/
        no_network.py, test_checker.py, test_dirnode.py, test_download.py,
        test_filenode.py, test_introducer.py, test_iputil.py, test_node.py,
        test_python2_regressions.py, test_repairer.py, test_runner.py,
        test_sftp.py, test_storage.py, test_upload.py, test_util.py
        web/
    uri.py
    util/
        cachedir.py, deferredutil.py, fileutil.py, hashutil.py, limiter.py,
        observer.py, pipeline.py, pollmixin.py, spans.py
    web/
    windows/
@@ -138,7 +138,7 @@ def report(out, path, results):
         print(path + (":%r %s captures %r assigned at line %d" % r), file=out)
 
 def check(sources, out):
-    class Counts:
+    class Counts(object):
         n = 0
         processed_files = 0
         suspect_files = 0

@@ -8,7 +8,7 @@ DAY=24*60*60
 MONTH=31*DAY
 YEAR=365*DAY
 
-class ReliabilityModel:
+class ReliabilityModel(object):
     """Generate a model of system-wide reliability, given several input
     parameters.
 

@@ -207,7 +207,7 @@ class ReliabilityModel:
         repair = matrix(new_repair_rows)
         return repair
 
-class ReliabilityReport:
+class ReliabilityReport(object):
     def __init__(self):
         self.samples = []
 

@@ -10,7 +10,7 @@ except ImportError:
     from nevow import inevow
 from zope.interface import implements
 
-class MyRequest:
+class MyRequest(object):
     implements(inevow.IRequest)
     pass
 

@@ -54,7 +54,7 @@ print("average file size:", abbreviate_space(avg_filesize))
 
 SERVER_CAPACITY = 10**12
 
-class Server:
+class Server(object):
     def __init__(self, nodeid, capacity):
         self.nodeid = nodeid
         self.used = 0

@@ -75,7 +75,7 @@ class Server:
         else:
             return "<%s %s>" % (self.__class__.__name__, self.nodeid)
 
-class Ring:
+class Ring(object):
     SHOW_MINMAX = False
     def __init__(self, numservers, seed, permute):
         self.servers = []

@@ -8,7 +8,7 @@ import random
 
 SERVER_CAPACITY = 10**12
 
-class Server:
+class Server(object):
     def __init__(self):
         self.si = random.randrange(0, 2**31)
         self.used = 0

@@ -17,7 +17,7 @@ def sha(s):
 def randomid():
     return os.urandom(20)
 
-class Node:
+class Node(object):
     def __init__(self, nid, introducer, simulator):
         self.nid = nid
         self.introducer = introducer

@@ -112,7 +112,7 @@ class Node:
         self.introducer.delete(fileid)
         return True
 
-class Introducer:
+class Introducer(object):
     def __init__(self, simulator):
         self.living_files = {}
         self.utilization = 0 # total size of all active files

@@ -149,7 +149,7 @@ class Introducer:
         self.simulator.stamp_utilization(self.utilization)
         del self.living_files[fileid]
 
-class Simulator:
+class Simulator(object):
     NUM_NODES = 1000
     EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
     RATE_ADDFILE = 1.0 / 10

@@ -37,7 +37,7 @@ GiB=1024*MiB
 TiB=1024*GiB
 PiB=1024*TiB
 
-class Sizes:
+class Sizes(object):
     def __init__(self, mode, file_size, arity=2):
         MAX_SEGSIZE = 128*KiB
         self.mode = mode

@@ -12,7 +12,7 @@ def roundup(size, blocksize=4096):
     return blocksize * mathutil.div_ceil(size, blocksize)
 
 
-class BigFakeString:
+class BigFakeString(object):
     def __init__(self, length):
         self.length = length
         self.fp = 0
newsfragments/3042.other (new file, 1 line):

@@ -0,0 +1 @@
+All old-style classes ported to new-style.
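For context on what the port changes (an illustrative Python 2 sketch, not part of the diff; the class names are made up):

# Python 2: a class that does not inherit from object is an old-style
# ("classic") class with different type semantics.
class Old:
    pass

class New(object):
    pass

print type(Old())              # <type 'instance'> -- all classic instances share one type
print type(New())              # <class '__main__.New'>
print issubclass(New, object)  # True
print issubclass(Old, object)  # False

# super(), properties, __slots__ and the C3 MRO require new-style classes:
print super(New, New())        # works
try:
    super(Old, Old())
except TypeError as e:
    print e                    # super() argument 1 must be type, not classobj

In Python 3 every class is new-style, so ports like this one are typically groundwork for a Python 3 migration.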
@@ -17,7 +17,7 @@ class FileProhibited(Exception):
         self.reason = reason
 
 
-class Blacklist:
+class Blacklist(object):
     def __init__(self, blacklist_fn):
         self.blacklist_fn = blacklist_fn
         self.last_mtime = None

@@ -169,7 +169,7 @@ class CheckAndRepairResults(object):
         return self.post_repair_results
 
 
-class DeepResultsBase:
+class DeepResultsBase(object):
 
     def __init__(self, root_storage_index):
         self.root_storage_index = root_storage_index

@@ -115,7 +115,7 @@ def _make_secret():
     return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + "\n"
 
 
-class SecretHolder:
+class SecretHolder(object):
     def __init__(self, lease_secret, convergence_secret):
         self._lease_secret = lease_secret
         self._convergence_secret = convergence_secret

@@ -129,7 +129,7 @@ class SecretHolder:
     def get_convergence_secret(self):
         return self._convergence_secret
 
-class KeyGenerator:
+class KeyGenerator(object):
     """I create RSA keys for mutable files. Each call to generate() returns a
     single keypair. The keysize is specified first by the keysize= argument
     to generate(), then with a default set by set_default_keysize(), then

@@ -38,7 +38,7 @@ def log_memory_usage(where=""):
                           where))
 
 @implementer(IConsumer)
-class FileWritingConsumer:
+class FileWritingConsumer(object):
     def __init__(self, filename):
         self.done = False
         self.f = open(filename, "wb")

@@ -143,7 +143,7 @@ class ControlServer(Referenceable, service.Service):
         d.addCallback(_average)
         return d
 
-class SpeedTest:
+class SpeedTest(object):
     def __init__(self, parent, count, size, mutable):
         self.parent = parent
         self.count = count

@@ -111,7 +111,7 @@ def normalize(namex):
 # contents and end by repacking them. It might be better to apply them to
 # the unpacked contents.
 
-class Deleter:
+class Deleter(object):
     def __init__(self, node, namex, must_exist=True, must_be_directory=False, must_be_file=False):
         self.node = node
         self.name = normalize(namex)

@@ -139,7 +139,7 @@ class Deleter:
         return new_contents
 
 
-class MetadataSetter:
+class MetadataSetter(object):
     def __init__(self, node, namex, metadata, create_readonly_node=None):
         self.node = node
         self.name = normalize(namex)

@@ -164,7 +164,7 @@ class MetadataSetter:
         return new_contents
 
 
-class Adder:
+class Adder(object):
     def __init__(self, node, entries=None, overwrite=True, create_readonly_node=None):
         self.node = node
         if entries is None:

@@ -861,7 +861,7 @@ class ManifestWalker(DeepStats):
             }
 
 
-class DeepChecker:
+class DeepChecker(object):
     def __init__(self, root, verify, repair, add_lease):
         root_si = root.get_storage_index()
         if root_si:

@@ -16,7 +16,7 @@ class NeedRootcapLookupScheme(Exception):
     mechanism to translate name+passwd pairs into a rootcap, either a file of
     name/passwd/rootcap tuples, or a server to do the translation."""
 
-class FTPAvatarID:
+class FTPAvatarID(object):
     def __init__(self, username, rootcap):
         self.username = username
         self.rootcap = rootcap

@@ -951,7 +951,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         return d
 
 
-class StoppableList:
+class StoppableList(object):
     def __init__(self, items):
         self.items = items
     def __iter__(self):

@@ -961,7 +961,7 @@ class StoppableList:
         pass
 
 
-class Reason:
+class Reason(object):
     def __init__(self, value):
         self.value = value
 

@@ -68,7 +68,7 @@ def roundup_pow2(x):
     return ans
 
 
-class CompleteBinaryTreeMixin:
+class CompleteBinaryTreeMixin(object):
     """
     Adds convenience methods to a complete binary tree.
 

@@ -1,7 +1,7 @@
 
 import weakref
 
-class History:
+class History(object):
     """Keep track of recent operations, for a status display."""
 
     name = "history"

@@ -7,7 +7,7 @@ from allmydata.util.dictutil import DictOfSets
 from common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
      BadSegmentNumberError
 
-class SegmentFetcher:
+class SegmentFetcher(object):
     """I am responsible for acquiring blocks for a single segment. I will use
     the Share instances passed to my add_shares() method to locate, retrieve,
     and validate those blocks. I expect my parent node to call my

@@ -20,11 +20,11 @@ def incidentally(res, f, *args, **kwargs):
     f(*args, **kwargs)
     return res
 
-class RequestToken:
+class RequestToken(object):
     def __init__(self, server):
         self.server = server
 
-class ShareFinder:
+class ShareFinder(object):
     OVERDUE_TIMEOUT = 10.0
 
     def __init__(self, storage_broker, verifycap, node, download_status,

@@ -23,16 +23,18 @@ class IDownloadStatusHandlingConsumer(Interface):
         """Record the DownloadStatus 'read event', to be updated with the
         time it takes to decrypt each chunk of data."""
 
-class Cancel:
+class Cancel(object):
     def __init__(self, f):
         self._f = f
         self.active = True
 
     def cancel(self):
         if self.active:
             self.active = False
             self._f(self)
 
-class DownloadNode:
+
+class DownloadNode(object):
     """Internal class which manages downloads and holds state. External
     callers use CiphertextFileNode instead."""
 

@@ -24,7 +24,7 @@ class DataUnavailable(Exception):
     pass
 
 
-class Share:
+class Share(object):
     """I represent a single instance of a single share (e.g. I reference the
     shnum2 for share SI=abcde on server xy12t, not the one on server ab45q).
     I am associated with a CommonShare that remembers data that is held in

@@ -825,7 +825,7 @@ class Share:
             o.notify(state=DEAD, f=f)
 
 
-class CommonShare:
+class CommonShare(object):
     # TODO: defer creation of the hashtree until somebody uses us. There will
     # be a lot of unused shares, and we shouldn't spend the memory on a large
     # hashtree unless necessary.

@@ -3,25 +3,32 @@ import itertools
 from zope.interface import implementer
 from allmydata.interfaces import IDownloadStatus
 
-class ReadEvent:
+class ReadEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
 
     def update(self, bytes, decrypttime, pausetime):
         self._ev["bytes_returned"] += bytes
         self._ev["decrypt_time"] += decrypttime
         self._ev["paused_time"] += pausetime
 
     def finished(self, finishtime):
         self._ev["finish_time"] = finishtime
         self._ds.update_last_timestamp(finishtime)
 
-class SegmentEvent:
+
+class SegmentEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
 
     def activate(self, when):
         if self._ev["active_time"] is None:
             self._ev["active_time"] = when
 
     def deliver(self, when, start, length, decodetime):
         assert self._ev["active_time"] is not None
         self._ev["finish_time"] = when

@@ -30,34 +37,43 @@ class SegmentEvent:
         self._ev["segment_start"] = start
         self._ev["segment_length"] = length
         self._ds.update_last_timestamp(when)
 
     def error(self, when):
         self._ev["finish_time"] = when
         self._ev["success"] = False
         self._ds.update_last_timestamp(when)
 
-class DYHBEvent:
+
+class DYHBEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
 
     def error(self, when):
         self._ev["finish_time"] = when
         self._ev["success"] = False
         self._ds.update_last_timestamp(when)
 
     def finished(self, shnums, when):
         self._ev["finish_time"] = when
         self._ev["success"] = True
         self._ev["response_shnums"] = shnums
         self._ds.update_last_timestamp(when)
 
-class BlockRequestEvent:
+
+class BlockRequestEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
 
     def finished(self, received, when):
         self._ev["finish_time"] = when
         self._ev["success"] = True
         self._ev["response_length"] = received
         self._ds.update_last_timestamp(when)
 
     def error(self, when):
         self._ev["finish_time"] = when
         self._ev["success"] = False
@@ -21,7 +21,7 @@ from allmydata.immutable.downloader.node import DownloadNode, \
      IDownloadStatusHandlingConsumer
 from allmydata.immutable.downloader.status import DownloadStatus
 
-class CiphertextFileNode:
+class CiphertextFileNode(object):
     def __init__(self, verifycap, storage_broker, secret_holder,
                  terminator, history):
         assert isinstance(verifycap, uri.CHKFileVerifierURI)

@@ -16,7 +16,7 @@ class NotEnoughWritersError(Exception):
     pass
 
 
-class CHKCheckerAndUEBFetcher:
+class CHKCheckerAndUEBFetcher(object):
     """I check to see if a file is already present in the grid. I also fetch
     the URI Extension Block, which is useful for an uploading client who
     wants to avoid the work of encryption and encoding.

@@ -244,7 +244,7 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader):
         self._helper.upload_finished(self._storage_index, 0)
         del self._reader
 
-class AskUntilSuccessMixin:
+class AskUntilSuccessMixin(object):
     # create me with a _reader array
     _last_failure = None
 

@@ -1355,7 +1355,7 @@ def read_this_many_bytes(uploadable, size, prepend_data=[]):
     d.addCallback(_got)
     return d
 
-class LiteralUploader:
+class LiteralUploader(object):
 
     def __init__(self, progress=None):
         self._status = s = UploadStatus()

@@ -1477,7 +1477,7 @@ class RemoteEncryptedUploadable(Referenceable):
         return self._eu.close()
 
 
-class AssistedUploader:
+class AssistedUploader(object):
 
     def __init__(self, helper, storage_broker):
         self._helper = helper

@@ -1632,7 +1632,7 @@ class AssistedUploader:
     def get_upload_status(self):
         return self._upload_status
 
-class BaseUploadable:
+class BaseUploadable(object):
     # this is overridden by max_segment_size
     default_max_segment_size = DEFAULT_MAX_SEGMENT_SIZE
     default_params_set = False

@@ -41,7 +41,7 @@ def unsign_from_foolscap(ann_t):
     ann = json.loads(msg.decode("utf-8"))
     return (ann, key_vs)
 
-class SubscriberDescriptor:
+class SubscriberDescriptor(object):
     """This describes a subscriber, for status display purposes. It contains
     the following attributes:
 

@@ -65,7 +65,7 @@ class SubscriberDescriptor:
         self.remote_address = remote_address
         self.tubid = tubid
 
-class AnnouncementDescriptor:
+class AnnouncementDescriptor(object):
     """This describes an announcement, for status display purposes. It
     contains the following attributes, which will be empty ("" for
     strings) if the client did not provide them:

@@ -8,7 +8,7 @@ from allmydata.mutable.common import MODE_CHECK, MODE_WRITE, CorruptShareError
 from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 from allmydata.mutable.retrieve import Retrieve # for verifying
 
-class MutableChecker:
+class MutableChecker(object):
     SERVERMAP_MODE = MODE_CHECK
 
     def __init__(self, node, storage_broker, history, monitor):

@@ -24,7 +24,7 @@ from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
 from allmydata.mutable.repairer import Repairer
 
 
-class BackoffAgent:
+class BackoffAgent(object):
     # these parameters are copied from foolscap.reconnector, which gets them
     # from twisted.internet.protocol.ReconnectingClientFactory
     initialDelay = 1.0

@@ -1180,7 +1180,7 @@ def _handle_bad_struct(f):
     f.trap(struct.error)
     raise BadShareError(f.value.args[0])
 
-class MDMFSlotReadProxy:
+class MDMFSlotReadProxy(object):
     """
     I read from a mutable slot filled with data written in the MDMF data
     format (which is described above).

@@ -100,7 +100,7 @@ class PublishStatus(object):
 class LoopLimitExceededError(Exception):
     pass
 
-class Publish:
+class Publish(object):
     """I represent a single act of publishing the mutable file to the grid. I
     will only publish my data if the servermap I am using still represents
     the current state of the world.

@@ -24,7 +24,7 @@ class RepairRequiresWritecapError(Exception):
 class MustForceRepairError(Exception):
     pass
 
-class Repairer:
+class Repairer(object):
     def __init__(self, node, check_results, storage_broker, history, monitor):
         self.node = node
         self.check_results = ICheckResults(check_results)

@@ -89,7 +89,7 @@ class RetrieveStatus(object):
         serverid = server.get_serverid()
         self._problems[serverid] = f
 
-class Marker:
+class Marker(object):
     pass
 
 @implementer(IPushProducer)

@@ -80,7 +80,7 @@ class UpdateStatus(object):
     def set_finished(self, when):
         self.finished = when
 
-class ServerMap:
+class ServerMap(object):
     """I record the placement of mutable shares.
 
     This object records which shares (of various versions) are located on

@@ -378,7 +378,7 @@ class ServerMap:
         self.update_data.setdefault(shnum , []).append((verinfo, data))
 
 
-class ServermapUpdater:
+class ServermapUpdater(object):
     def __init__(self, filenode, storage_broker, monitor, servermap,
                  mode=MODE_READ, add_lease=False, update_range=None):
         """I update a servermap, locating a sufficient number of useful

@@ -78,7 +78,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         return None
 
 
-class FileResult:
+class FileResult(object):
     def __init__(self, bdb, filecap, should_check,
                  path, mtime, ctime, size):
         self.bdb = bdb

@@ -106,7 +106,7 @@ class FileResult:
         self.bdb.did_check_file_healthy(self.filecap, results)
 
 
-class DirectoryResult:
+class DirectoryResult(object):
     def __init__(self, bdb, dirhash, dircap, should_check):
         self.bdb = bdb
         self.dircap = dircap

@@ -128,7 +128,7 @@ class DirectoryResult:
         self.bdb.did_check_directory_healthy(self.dircap, results)
 
 
-class BackupDB_v2:
+class BackupDB_v2(object):
     VERSION = 2
     NO_CHECK_BEFORE = 1*MONTH
     ALWAYS_CHECK_AFTER = 2*MONTH

@@ -151,7 +151,7 @@ def get_aliases(nodedir):
             pass
     return aliases
 
-class DefaultAliasMarker:
+class DefaultAliasMarker(object):
     pass
 
 pretend_platform_uses_lettercolon = False # for tests

@@ -9,7 +9,7 @@ from allmydata.util.encodingutil import quote_output, is_printable_ascii
 import urllib
 import json
 
-class SlowOperationRunner:
+class SlowOperationRunner(object):
 
     def run(self, options):
         stderr = options.stderr

@@ -8,7 +8,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path
 
-class Checker:
+class Checker(object):
     pass
 
 def _quote_serverid_index_share(serverid, storage_index, sharenum):

@@ -111,7 +111,7 @@ def check(options):
             return errno
     return 0
 
-class FakeTransport:
+class FakeTransport(object):
     disconnecting = False
 
 class DeepCheckOutput(LineOnlyReceiver):

@@ -70,7 +70,7 @@ def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
         raise HTTPError("Error during mkdir", resp)
 
 
-class LocalFileSource:
+class LocalFileSource(object):
     def __init__(self, pathname, basename):
         precondition_abspath(pathname)
         self.pathname = pathname

@@ -85,7 +85,7 @@ class LocalFileSource:
     def open(self, caps_only):
         return open(self.pathname, "rb")
 
-class LocalFileTarget:
+class LocalFileTarget(object):
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname

@@ -93,7 +93,7 @@ class LocalFileTarget:
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)
 
-class LocalMissingTarget:
+class LocalMissingTarget(object):
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname

@@ -101,7 +101,7 @@ class LocalMissingTarget:
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)
 
-class LocalDirectorySource:
+class LocalDirectorySource(object):
     def __init__(self, progressfunc, pathname, basename):
         precondition_abspath(pathname)
 

@@ -133,7 +133,7 @@ class LocalDirectorySource:
             # TODO: output a warning
             pass
 
-class LocalDirectoryTarget:
+class LocalDirectoryTarget(object):
     def __init__(self, progressfunc, pathname):
         precondition_abspath(pathname)
 

@@ -180,7 +180,7 @@ class LocalDirectoryTarget:
         pass
 
 
-class TahoeFileSource:
+class TahoeFileSource(object):
     def __init__(self, nodeurl, mutable, writecap, readcap, basename):
         self.nodeurl = nodeurl
         self.mutable = mutable

@@ -205,7 +205,7 @@ class TahoeFileSource:
     def bestcap(self):
         return self.writecap or self.readcap
 
-class TahoeFileTarget:
+class TahoeFileTarget(object):
     def __init__(self, nodeurl, mutable, writecap, readcap, url):
         self.nodeurl = nodeurl
         self.mutable = mutable

@@ -225,7 +225,7 @@ class TahoeFileTarget:
     # to always create mutable files, or to copy mutable files into new
     # mutable files. ticket #835
 
-class TahoeDirectorySource:
+class TahoeDirectorySource(object):
     def __init__(self, nodeurl, cache, progressfunc, basename):
         self.nodeurl = nodeurl
         self.cache = cache

@@ -298,7 +298,7 @@ class TahoeDirectorySource:
                 "You probably need to use a later version of "
                 "Tahoe-LAFS to copy this directory.")
 
-class TahoeMissingTarget:
+class TahoeMissingTarget(object):
     def __init__(self, url):
         self.url = url
 

@@ -315,7 +315,7 @@ class TahoeMissingTarget:
         # I'm not sure this will always work
         return PUT(self.url + "?t=uri", filecap)
 
-class TahoeDirectoryTarget:
+class TahoeDirectoryTarget(object):
     def __init__(self, nodeurl, cache, progressfunc):
         self.nodeurl = nodeurl
         self.cache = cache

@@ -459,7 +459,7 @@ FileTargets = (LocalFileTarget, TahoeFileTarget)
 DirectoryTargets = (LocalDirectoryTarget, TahoeDirectoryTarget)
 MissingTargets = (LocalMissingTarget, TahoeMissingTarget)
 
-class Copier:
+class Copier(object):
 
     def do_copy(self, options, progressfunc=None):
         if options['quiet']:

@@ -9,7 +9,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path
 
-class FakeTransport:
+class FakeTransport(object):
     disconnecting = False
 
 class ManifestStreamer(LineOnlyReceiver):

@@ -36,7 +36,7 @@ from allmydata.storage.common import UnknownImmutableContainerVersionError, \
 # then the value stored in this field will be the actual share data length
 # modulo 2**32.
 
-class ShareFile:
+class ShareFile(object):
     LEASE_SIZE = struct.calcsize(">L32s32sL")
     sharetype = "immutable"
 
@@ -1,6 +1,6 @@
 import struct, time
 
-class LeaseInfo:
+class LeaseInfo(object):
     def __init__(self, owner_num=None, renew_secret=None, cancel_secret=None,
                  expiration_time=None, nodeid=None):
         self.owner_num = owner_num

@@ -14,9 +14,11 @@ class LeaseInfo:
 
     def get_expiration_time(self):
         return self.expiration_time
+
     def get_grant_renew_time_time(self):
         # hack, based upon fixed 31day expiration period
         return self.expiration_time - 31*24*60*60
+
     def get_age(self):
         return time.time() - self.get_grant_renew_time_time()
 

@@ -27,6 +29,7 @@ class LeaseInfo:
          self.expiration_time) = struct.unpack(">L32s32sL", data)
         self.nodeid = None
         return self
+
     def to_immutable_data(self):
         return struct.pack(">L32s32sL",
                            self.owner_num,

@@ -39,6 +42,7 @@ class LeaseInfo:
                            int(self.expiration_time),
                            self.renew_secret, self.cancel_secret,
                            self.nodeid)
+
     def from_mutable_data(self, data):
         (self.owner_num,
          self.expiration_time,
@@ -35,7 +35,7 @@ from allmydata.mutable.layout import MAX_MUTABLE_SHARE_SIZE
 assert struct.calcsize(">L") == 4, struct.calcsize(">L")
 assert struct.calcsize(">Q") == 8, struct.calcsize(">Q")
 
-class MutableShareFile:
+class MutableShareFile(object):
 
     sharetype = "mutable"
     DATA_LENGTH_OFFSET = struct.calcsize(">32s20s32s")

@@ -443,7 +443,7 @@ def testv_compare(a, op, b):
         return a > b
     # never reached
 
-class EmptyShare:
+class EmptyShare(object):
 
     def check_testv(self, testv):
         test_good = True

@@ -29,11 +29,11 @@ class ContainerNode(object):
     def is_mutable(self):
         return True
 
-class FakeNode:
+class FakeNode(object):
     def raise_error(self):
         return None
 
-class FakeNodeMaker:
+class FakeNodeMaker(object):
     def create_from_cap(self, writecap, readcap=None, deep_immutable=False, name=''):
         return FakeNode()
 

@@ -72,7 +72,7 @@ class GridTesterOptions(usage.Options):
 class CommandFailed(Exception):
     pass
 
-class GridTester:
+class GridTester(object):
     def __init__(self, config):
         self.config = config
         self.tahoe = config.tahoe

@@ -8,7 +8,7 @@ from foolscap.api import Tub, fireEventually
 
 MB = 1000000
 
-class SpeedTest:
+class SpeedTest(object):
     DO_IMMUTABLE = True
     DO_MUTABLE_CREATE = True
     DO_MUTABLE = True

@@ -110,7 +110,7 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         original_do_http = tahoe_mv.do_http
         def mock_do_http(method, url, body=""):
             if method == "DELETE":
-                class FakeResponse:
+                class FakeResponse(object):
                     def read(self):
                         return "response"
                 resp = FakeResponse()

@@ -201,7 +201,7 @@ class DummyProducer(object):
     pass
 
 @implementer(IImmutableFileNode)
-class FakeCHKFileNode:
+class FakeCHKFileNode(object):
     """I provide IImmutableFileNode, but all of my data is stored in a
     class-level dictionary."""
 

@@ -339,7 +339,7 @@ def create_chk_filenode(contents, all_contents):
 
 
 @implementer(IMutableFileNode, ICheckable)
-class FakeMutableFileNode:
+class FakeMutableFileNode(object):
     """I provide IMutableFileNode, but all of my data is stored in a
     class-level dictionary."""
 

@@ -597,7 +597,7 @@ class LoggingServiceParent(service.MultiService):
 
 TEST_DATA="\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)
 
-class ShouldFailMixin:
+class ShouldFailMixin(object):
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
         """Assert that a function call raises some exception. This is a

@@ -638,7 +638,7 @@ class ShouldFailMixin:
         d.addBoth(done)
         return d
 
-class WebErrorMixin:
+class WebErrorMixin(object):
     def explain_web_error(self, f):
         # an error on the server side causes the client-side getPage() to
         # return a failure(t.web.error.Error), and its str() doesn't show the

@@ -83,13 +83,13 @@ def flip_one_bit(s, offset=0, size=None):
     return result
 
 
-class ReallyEqualMixin:
+class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
         self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
 
 
-class NonASCIIPathMixin:
+class NonASCIIPathMixin(object):
     def mkdir_nonascii(self, dirpath):
         # Kludge to work around the fact that buildbot can't remove a directory tree that has
         # any non-ASCII directory names on Windows. (#1472)

@@ -143,13 +143,13 @@ class SignalMixin(object):
             signal.signal(signal.SIGCHLD, self.sigchldHandler)
         return super(SignalMixin, self).tearDown()
 
-class StallMixin:
+class StallMixin(object):
     def stall(self, res=None, delay=1):
         d = defer.Deferred()
         reactor.callLater(delay, d.callback, res)
         return d
 
-class ShouldFailMixin:
+class ShouldFailMixin(object):
 
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):

@@ -6,7 +6,7 @@ from twisted.web.error import Error
 from nevow.testutil import FakeRequest
 from nevow import inevow, context
 
-class WebRenderingMixin:
+class WebRenderingMixin(object):
     # d=page.renderString() or s=page.renderSynchronously() will exercise
     # docFactory, render_*/data_* . It won't exercise want_json(), or my
     # renderHTTP() override which tests want_json(). To exercise args=, we

@@ -52,7 +52,7 @@ def eliot_logged_test(f):
     """
     # A convenient, mutable container into which nested functions can write
    # state to be shared among them.
-    class storage:
+    class storage(object):
        pass
 
     @wraps(f)

@@ -19,14 +19,14 @@ from ..no_network import GridTestMixin
 from .. import common_util as testutil
 from ..common_util import DevNullDictionary
 
-class SameKeyGenerator:
+class SameKeyGenerator(object):
     def __init__(self, pubkey, privkey):
         self.pubkey = pubkey
         self.privkey = privkey
     def generate(self, keysize=None):
         return defer.succeed( (self.pubkey, self.privkey) )
 
-class FirstServerGetsKilled:
+class FirstServerGetsKilled(object):
     done = False
     def notify(self, retval, wrapper, methname):
         if not self.done:

@@ -34,7 +34,7 @@ class FirstServerGetsKilled:
             self.done = True
         return retval
 
-class FirstServerGetsDeleted:
+class FirstServerGetsDeleted(object):
     def __init__(self):
         self.done = False
         self.silenced = None

@@ -21,7 +21,7 @@ def eventuaaaaaly(res=None):
 # network connections, both to speed up the tests and to reduce the amount of
 # non-mutable.py code being exercised.
 
-class FakeStorage:
+class FakeStorage(object):
     # this class replaces the collection of storage servers, allowing the
     # tests to examine and manipulate the published shares. It also lets us
     # control the order in which read queries are answered, to exercise more

@@ -78,11 +78,13 @@ class FakeStorage:
             shares[shnum] = f.getvalue()
 
 
-class FakeStorageServer:
+class FakeStorageServer(object):
+
     def __init__(self, peerid, storage):
         self.peerid = peerid
         self.storage = storage
         self.queries = 0
+
     def callRemote(self, methname, *args, **kwargs):
         self.queries += 1
         def _call():

@@ -221,7 +223,7 @@ def make_nodemaker(s=None, num_peers=10, keysize=TEST_RSA_KEY_SIZE):
                         {"k": 3, "n": 10}, SDMF_VERSION, keygen)
     return nodemaker
 
-class PublishMixin:
+class PublishMixin(object):
     def publish_one(self):
         # publish a file and create shares, which can then be manipulated
         # later.

@@ -331,7 +333,7 @@ class PublishMixin:
             index = versionmap[shnum]
             shares[peerid][shnum] = oldshares[index][peerid][shnum]
 
-class CheckerMixin:
+class CheckerMixin(object):
     def check_good(self, r, where):
         self.failUnless(r.is_healthy(), where)
         return r

@@ -41,10 +41,10 @@ from .common import (
 class IntentionalError(Exception):
     pass
 
-class Marker:
+class Marker(object):
     pass
 
-class LocalWrapper:
+class LocalWrapper(object):
     def __init__(self, original):
         self.original = original
         self.broken = False

@@ -250,7 +250,7 @@ class _NoNetworkClient(_Client):
         pass
     #._servers will be set by the NoNetworkGrid which creates us
 
-class SimpleStats:
+class SimpleStats(object):
     def __init__(self):
         self.counters = {}
         self.stats_producers = []

@@ -15,7 +15,7 @@ from allmydata.immutable.upload import Data
 from allmydata.test.common_web import WebRenderingMixin
 from allmydata.mutable.publish import MutableData
 
-class FakeClient:
+class FakeClient(object):
     def get_storage_broker(self):
         return self.storage_broker
 

@@ -1380,7 +1380,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
         self.set_up_grid(oneshare=True)
         return self._do_initial_children_test(mdmf=True)
 
-class MinimalFakeMutableFile:
+class MinimalFakeMutableFile(object):
     def get_writekey(self):
         return "writekey"
 

@@ -1287,11 +1287,12 @@ def make_servers(clientids):
         servers[clientid] = make_server(clientid)
     return servers
 
-class MyShare:
+class MyShare(object):
+
     def __init__(self, shnum, server, rtt):
         self._shnum = shnum
         self._server = server
         self._dyhb_rtt = rtt
 
     def __repr__(self):
         return "sh%d-on-%s" % (self._shnum, self._server.get_name())
 

@@ -1302,21 +1303,26 @@ class MySegmentFetcher(SegmentFetcher):
     def _start_share(self, share, shnum):
         self._test_start_shares.append(share)
 
-class FakeNode:
+class FakeNode(object):
     def __init__(self):
         self.want_more = 0
         self.failed = None
         self.processed = None
         self._si_prefix = "si_prefix"
+
     def want_more_shares(self):
         self.want_more += 1
+
     def fetch_failed(self, fetcher, f):
         self.failed = f
+
     def process_blocks(self, segnum, blocks):
         self.processed = (segnum, blocks)
+
     def get_num_segments(self):
         return 1, True
+
 
 class Selection(unittest.TestCase):
     def test_no_shares(self):
         node = FakeNode()

@@ -8,10 +8,10 @@ from allmydata.mutable.filenode import MutableFileNode
 from allmydata.util import hashutil
 from allmydata.util.consumer import download_to_data
 
-class NotANode:
+class NotANode(object):
     pass
 
-class FakeClient:
+class FakeClient(object):
     # just enough to let the node acquire a downloader (which it won't use),
     # and to get default encoding parameters
     def getServiceNamed(self, name):

@@ -716,7 +716,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
         os.makedirs(self.basedir)
         return self.do_system_test()
 
-class FakeRemoteReference:
+class FakeRemoteReference(object):
     def notifyOnDisconnect(self, *args, **kwargs): pass
     def getRemoteTubID(self): return "62ubehyunnyhzs7r6vdonnm2hpi52w6y"
     def getLocationHints(self): return ["tcp:here.example.com:1234",

@@ -86,7 +86,7 @@ WINDOWS_TEST_ADDRESSES = set(["127.0.0.1", "10.0.2.15", "192.168.0.10"])
 CYGWIN_TEST_ADDRESSES = set(["127.0.0.1", "192.168.0.10"])
 
 
-class FakeProcess:
+class FakeProcess(object):
     def __init__(self, output, err):
         self.output = output
         self.err = err

@@ -520,7 +520,7 @@ ENABLE_HELPER = """
 enabled = true
 """
 
-class FakeTub:
+class FakeTub(object):
     def __init__(self):
         self.tubID = base64.b32encode("foo")
         self.listening_ports = []
src/allmydata/test/test_python2_regressions.py (new file, 31 lines):

@@ -0,0 +1,31 @@
+"""
+Tests to check for Python2 regressions
+"""
+from twisted.trial import unittest
+from twisted.python.modules import getModule
+
+class PythonTwoRegressions(unittest.TestCase):
+    """
+    A test class to hold Python2 regression tests.
+    """
+
+    def is_new_style(self, cls):
+        """check for being a new-style class"""
+        # another test could be: issubclass(value, type)
+        has_class_attr = hasattr(cls, '__class__')
+        dict_or_slots = '__dict__' in dir(cls) or hasattr(cls, '__slots__')
+        return has_class_attr and dict_or_slots
+
+    def test_old_style_class(self):
+        """
+        Check if all classes are new-style classes
+        """
+        for mod in getModule("allmydata").walkModules():
+            for attr in mod.iterAttributes():
+                value = attr.load()
+                if isinstance(value, str):
+                    # apparently strings are not a new-style class (in Python 2.7)
+                    # so we skip testing them
+                    return
+                self.assertTrue(self.is_new_style(value),
+                                "{} does not seem to be a new-style class".format(attr.name))
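To illustrate what is_new_style() distinguishes, here is a standalone Python 2 sketch (OldStyle and NewStyle are hypothetical names; the real suite runs under trial, e.g. "trial allmydata.test.test_python2_regressions"):

# Duck-typed version of the check used by the test above.
class OldStyle:            # classic class: the class object has no __class__
    pass

class NewStyle(object):    # new-style class
    pass

def is_new_style(cls):
    has_class_attr = hasattr(cls, '__class__')
    dict_or_slots = '__dict__' in dir(cls) or hasattr(cls, '__slots__')
    return has_class_attr and dict_or_slots

print is_new_style(NewStyle)   # True
print is_new_style(OldStyle)   # False

A simpler equivalent check, alluded to in the test's comment, is isinstance(cls, type), which is True for new-style classes and False for classic ones.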
@@ -19,7 +19,7 @@ MAX_DELTA_READS = 10 * READ_LEEWAY # N = 10
 
 timeout=240 # François's ARM box timed out after 120 seconds of Verifier.test_corrupt_crypttext_hashtree
 
-class RepairTestMixin:
+class RepairTestMixin(object):
     def failUnlessIsInstance(self, x, xtype):
         self.failUnless(isinstance(x, xtype), x)
 

@@ -55,7 +55,9 @@ def get_root_from_file(src):
 srcfile = allmydata.__file__
 rootdir = get_root_from_file(srcfile)
 
-class RunBinTahoeMixin:
+
+class RunBinTahoeMixin(object):
+
     @inlineCallbacks
     def find_import_location(self):
         res = yield self.run_bintahoe(["--version-and-path"])

@@ -1393,7 +1393,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
         return d
 
     def test_execCommand_and_openShell(self):
-        class MockProtocol:
+        class MockProtocol(object):
             def __init__(self):
                 self.output = ""
                 self.error = ""

@@ -33,9 +33,10 @@ from allmydata.test.common_web import WebRenderingMixin
 from allmydata.test.no_network import NoNetworkServer
 from allmydata.web.storage import StorageStatus, remove_prefix
 
-class Marker:
+class Marker(object):
     pass
-class FakeCanary:
+
+class FakeCanary(object):
     def __init__(self, ignore_disconnectors=False):
         self.ignore = ignore_disconnectors
         self.disconnectors = {}

@@ -50,7 +51,7 @@ class FakeCanary:
             return
         del self.disconnectors[marker]
 
-class FakeStatsProvider:
+class FakeStatsProvider(object):
     def count(self, name, delta=1):
         pass
     def register_producer(self, producer):

@@ -159,7 +160,7 @@ class Bucket(unittest.TestCase):
         result_of_read = br.remote_read(0, len(share_data)+1)
         self.failUnlessEqual(result_of_read, share_data)
 
-class RemoteBucket:
+class RemoteBucket(object):
 
     def __init__(self):
         self.read_count = 0

@@ -3052,8 +3053,9 @@ class InstrumentedLeaseCheckingCrawler(LeaseCheckingCrawler):
         if not self.stop_after_first_bucket:
             self.cpu_slice = 500
 
-class BrokenStatResults:
+class BrokenStatResults(object):
     pass
+
 class No_ST_BLOCKS_LeaseCheckingCrawler(LeaseCheckingCrawler):
     def stat(self, fn):
         s = os.stat(fn)

@@ -86,7 +86,7 @@ class Uploadable(unittest.TestCase):
 class ServerError(Exception):
     pass
 
-class SetDEPMixin:
+class SetDEPMixin(object):
     def set_encoding_parameters(self, k, happy, n, max_segsize=1*MiB):
         p = {"k": k,
              "happy": happy,

@@ -95,7 +95,7 @@ class SetDEPMixin:
             }
         self.node.encoding_params = p
 
-class FakeStorageServer:
+class FakeStorageServer(object):
     def __init__(self, mode, reactor=None):
         self.mode = mode
         self.allocated = []

@@ -162,7 +162,7 @@ class FakeStorageServer:
 
 
 
-class FakeBucketWriter:
+class FakeBucketWriter(object):
     # a diagnostic version of storageserver.BucketWriter
     def __init__(self, size):
         self.data = StringIO()

@@ -856,7 +856,7 @@ def is_happy_enough(servertoshnums, h, k):
             return False
     return True
 
-class FakeServerTracker:
+class FakeServerTracker(object):
     def __init__(self, serverid, buckets):
         self._serverid = serverid
         self.buckets = buckets

@@ -1333,7 +1333,7 @@ class CacheDir(unittest.TestCase):
         del b2
 
         ctr = [0]
-        class EqButNotIs:
+        class EqButNotIs(object):
            def __init__(self, x):
                self.x = x
                self.hash = ctr[0]

@@ -1615,7 +1615,7 @@ class Log(unittest.TestCase):
         self.flushLoggedErrors(SampleError)
 
 
-class SimpleSpans:
+class SimpleSpans(object):
     # this is a simple+inefficient form of util.spans.Spans . We compare the
     # behavior of this reference model against the real (efficient) form.
 

@@ -1943,7 +1943,7 @@ def replace(s, start, data):
     assert len(s) >= start+len(data)
     return s[:start] + data + s[start+len(data):]
 
-class SimpleDataSpans:
+class SimpleDataSpans(object):
     def __init__(self, other=None):
         self.missing = "" # "1" where missing, "0" where found
         self.data = ""

@@ -10,7 +10,7 @@ class FakeRoot(Root):
     def now_fn(self):
         return 0
 
-class FakeContext:
+class FakeContext(object):
     def __init__(self):
         self.slots = {}
         self.tag = self

@@ -57,7 +57,7 @@ from ..status import FakeStatus
 # create a fake uploader/downloader, and a couple of fake dirnodes, then
 # create a webserver that works against them
 
-class FakeStatsProvider:
+class FakeStatsProvider(object):
     def get_stats(self):
         stats = {'stats': {}, 'counters': {}}
         return stats

@@ -179,7 +179,7 @@ def build_one_ds():
 
     return ds
 
-class FakeHistory:
+class FakeHistory(object):
     _all_upload_status = [upload.UploadStatus()]
     _all_download_status = [build_one_ds()]
     _all_mapupdate_statuses = [servermap.UpdateStatus()]

@@ -707,7 +707,7 @@ class ImmutableDirectoryURIVerifier(DirectoryURIVerifier):
     INNER_URI_CLASS=CHKFileVerifierURI
 
 
-class UnknownURI:
+class UnknownURI(object):
     def __init__(self, uri, error=None):
         self._uri = uri
         self._error = error

@@ -35,7 +35,7 @@ class CacheDirectoryManager(service.MultiService):
             if now - mtime > self.old:
                 os.remove(absfn)
 
-class CacheFile:
+class CacheFile(object):
     def __init__(self, absfn):
         self.filename = absfn
 

@@ -102,7 +102,7 @@ def eventual_chain(source, target):
     source.addCallbacks(eventually_callback(target), eventually_errback(target))
 
 
-class HookMixin:
+class HookMixin(object):
     """
     I am a helper mixin that maintains a collection of named hooks, primarily
     for use in tests. Each hook is set to an unfired Deferred using 'set_hook',

@@ -75,7 +75,7 @@ def remove(f, tries=4, basedelay=0.1):
             basedelay *= 2
     return os.remove(f) # The last try.
 
-class ReopenableNamedTemporaryFile:
+class ReopenableNamedTemporaryFile(object):
     """
     This uses tempfile.mkstemp() to generate a secure temp file. It then closes
     the file, leaving a zero-length file as a placeholder. You can get the

@@ -99,7 +99,7 @@ class ReopenableNamedTemporaryFile:
     def shutdown(self):
         remove(self.name)
 
-class EncryptedTemporaryFile:
+class EncryptedTemporaryFile(object):
     # not implemented: next, readline, readlines, xreadlines, writelines
 
     def __init__(self):

@@ -14,7 +14,7 @@ from allmydata.util.netstring import netstring
 # kinds.
 CRYPTO_VAL_SIZE=32
 
-class _SHA256d_Hasher:
+class _SHA256d_Hasher(object):
     # use SHA-256d, as defined by Ferguson and Schneier: hash the output
     # again to prevent length-extension attacks
     def __init__(self, truncate_to=None):

@@ -2,7 +2,7 @@
 from twisted.internet import defer
 from foolscap.api import eventually
 
-class ConcurrencyLimiter:
+class ConcurrencyLimiter(object):
     """I implement a basic concurrency limiter. Add work to it in the form of
     (callable, args, kwargs) tuples. No more than LIMIT callables will be
     outstanding at any one time.

@@ -10,7 +10,7 @@ something happens. The way this is typically implemented is that the observed
 has an ObserverList whose when_fired method is called in the observed's
 'when_something'."""
 
-class OneShotObserverList:
+class OneShotObserverList(object):
     """A one-shot event distributor."""
     def __init__(self):
         self._fired = False
@@ -18,6 +18,12 @@ class OneShotObserverList:
         self._watchers = []
         self.__repr__ = self._unfired_repr
 
+    def __repr__(self):
+        """string representation of the OneshotObserverList"""
+        if self._fired:
+            return self._fired_repr()
+        return self._unfired_repr()
+
     def _unfired_repr(self):
         return "<OneShotObserverList [%s]>" % (self._watchers, )
 
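The __repr__ method added above compensates for a lookup-rule change that comes with the (object) port: on new-style classes, special methods such as __repr__ are resolved on the type, not the instance, so the existing "self.__repr__ = self._unfired_repr" assignment no longer affects repr(). A minimal Python 2 sketch (names are hypothetical):

class Old:
    def __init__(self):
        self.__repr__ = lambda: "patched"   # honored: classic instances defer to instance attributes

class New(object):
    def __init__(self):
        self.__repr__ = lambda: "patched"   # ignored by repr(): lookup happens on the type

print repr(Old())   # patched
print repr(New())   # <__main__.New object at 0x...>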
@@ -77,7 +83,7 @@ class LazyOneShotObserverList(OneShotObserverList):
         if self._watchers: # if not, don't call result_producer
             self._fire(self._get_result())
 
-class ObserverList:
+class ObserverList(object):
     """A simple class to distribute events to a number of subscribers."""
 
     def __init__(self):

@@ -93,7 +99,7 @@ class ObserverList:
         for o in self._watchers:
             eventually(o, *args, **kwargs)
 
-class EventStreamObserver:
+class EventStreamObserver(object):
     """A simple class to distribute multiple events to a single subscriber.
     It accepts arbitrary kwargs, but no posargs."""
     def __init__(self):

@@ -66,7 +66,7 @@ class ExpandableDeferredList(defer.Deferred):
         return f
 
 
-class Pipeline:
+class Pipeline(object):
     """I manage a size-limited pipeline of Deferred operations, usually
     callRemote() messages."""
 

@@ -9,7 +9,7 @@ class TimeoutError(Exception):
 class PollComplete(Exception):
     pass
 
-class PollMixin:
+class PollMixin(object):
     _poll_should_ignore_these_errors = []
 
     def poll(self, check_f, pollinterval=0.01, timeout=1000):

@@ -1,7 +1,7 @@
 from __future__ import print_function
 
 
-class Spans:
+class Spans(object):
     """I represent a compressed list of booleans, one per index (an integer).
     Typically, each index represents an offset into a large string, pointing
     to a specific byte of a share. In this context, True means that byte has

@@ -222,7 +222,7 @@ def adjacent(start0, length0, start1, length1):
         return True
     return False
 
-class DataSpans:
+class DataSpans(object):
     """I represent portions of a large string. Equivalently, I can be said to
     maintain a large array of characters (with gaps of empty elements). I can
     be used to manage access to a remote share, where some pieces have been

@@ -63,7 +63,7 @@ def json_check_and_repair_results(r):
     data["post-repair-results"] = json_check_results(post)
     return data
 
-class ResultsBase:
+class ResultsBase(object):
     # self.client must point to the Client, so we can get nicknames and
     # determine the permuted peer order
 

@@ -205,7 +205,7 @@ class LiteralCheckResultsRenderer(rend.Page, ResultsBase):
             return T.div[T.a(href=return_to)["Return to file."]]
         return ""
 
-class CheckerBase:
+class CheckerBase(object):
 
     def renderHTTP(self, ctx):
         if self.want_json(ctx):

@@ -371,7 +371,7 @@ class NeedOperationHandleError(WebError):
     pass
 
 
-class RenderMixin:
+class RenderMixin(object):
 
     def renderHTTP(self, ctx):
         request = IRequest(ctx)

@@ -23,7 +23,7 @@ from allmydata.web.check_results import CheckResultsRenderer, \
      CheckAndRepairResultsRenderer, LiteralCheckResultsRenderer
 from allmydata.web.info import MoreInfo
 
-class ReplaceMeMixin:
+class ReplaceMeMixin(object):
     def replace_me_with_a_child(self, req, client, replace):
         # a new file is being uploaded in our place.
         file_format = get_format(req, "CHK")

@@ -120,7 +120,7 @@ class OphandleTable(rend.Page, service.Service):
         self.timers.pop(ophandle, None)
         self.handles.pop(ophandle, None)
 
-class ReloadMixin:
+class ReloadMixin(object):
     REFRESH_TIME = 1*MINUTE
 
     def render_refresh(self, ctx, data):

@@ -17,7 +17,7 @@ from allmydata.web.common import (
 from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
      IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus
 
-class RateAndTimeMixin:
+class RateAndTimeMixin(object):
 
     def render_time(self, ctx, data):
         return abbreviate_time(data)

@@ -117,7 +117,7 @@ def initialize():
                           use_last_error=True
                          )(("WriteConsoleW", windll.kernel32))
 
-class UnicodeOutput:
+class UnicodeOutput(object):
     def __init__(self, hConsole, stream, fileno, name):
         self._hConsole = hConsole
         self._stream = stream