Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git, synced 2025-06-18 07:18:20 +00:00
ported old-style classes to new-style
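Background on the change: in Python 2, "class Foo:" defines an old-style (classic) class, while "class Foo(object):" defines a new-style class. Only new-style classes get the descriptor protocol (property and friends), super(), __slots__, and the C3 method resolution order, and new-style classes are the only kind that exists in Python 3, so spelling out the (object) base is a standard step when preparing a Python 2 codebase for a py3 port. A minimal Python 2 sketch of the difference (illustrative only, not taken from the patch below):

    class Old:                      # old-style: all instances share one type
        pass

    class New(object):              # new-style: the class itself is the type
        pass

    print type(Old())               # <type 'instance'>
    print type(New())               # <class '__main__.New'>

    class Child(New):
        def __init__(self):
            # super() only accepts new-style classes; passing a classic class
            # raises TypeError: super() argument 1 must be type, not classobj
            super(Child, self).__init__()

For code that never relied on classic-class quirks, adding the (object) base is behavior-preserving, which is why a mechanical patch like the one below can touch so many classes at once.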
@@ -138,7 +138,7 @@ def report(out, path, results):
         print(path + (":%r %s captures %r assigned at line %d" % r), file=out)
 
 def check(sources, out):
-    class Counts:
+    class Counts(object):
         n = 0
         processed_files = 0
         suspect_files = 0
@@ -8,7 +8,7 @@ DAY=24*60*60
 MONTH=31*DAY
 YEAR=365*DAY
 
-class ReliabilityModel:
+class ReliabilityModel(object):
     """Generate a model of system-wide reliability, given several input
     parameters.
 
@@ -207,7 +207,7 @@ class ReliabilityModel:
         repair = matrix(new_repair_rows)
         return repair
 
-class ReliabilityReport:
+class ReliabilityReport(object):
     def __init__(self):
         self.samples = []
 
@@ -10,7 +10,7 @@ except ImportError:
 from nevow import inevow
 from zope.interface import implements
 
-class MyRequest:
+class MyRequest(object):
     implements(inevow.IRequest)
     pass
 
@@ -54,7 +54,7 @@ print("average file size:", abbreviate_space(avg_filesize))
 
 SERVER_CAPACITY = 10**12
 
-class Server:
+class Server(object):
     def __init__(self, nodeid, capacity):
         self.nodeid = nodeid
         self.used = 0
@@ -75,7 +75,7 @@ class Server:
         else:
             return "<%s %s>" % (self.__class__.__name__, self.nodeid)
 
-class Ring:
+class Ring(object):
     SHOW_MINMAX = False
     def __init__(self, numservers, seed, permute):
         self.servers = []
@@ -8,7 +8,7 @@ import random
 
 SERVER_CAPACITY = 10**12
 
-class Server:
+class Server(object):
     def __init__(self):
         self.si = random.randrange(0, 2**31)
         self.used = 0
@@ -17,7 +17,7 @@ def sha(s):
 def randomid():
     return os.urandom(20)
 
-class Node:
+class Node(object):
     def __init__(self, nid, introducer, simulator):
         self.nid = nid
         self.introducer = introducer
@@ -112,7 +112,7 @@ class Node:
         self.introducer.delete(fileid)
         return True
 
-class Introducer:
+class Introducer(object):
     def __init__(self, simulator):
         self.living_files = {}
         self.utilization = 0 # total size of all active files
@@ -149,7 +149,7 @@ class Introducer:
         self.simulator.stamp_utilization(self.utilization)
         del self.living_files[fileid]
 
-class Simulator:
+class Simulator(object):
     NUM_NODES = 1000
     EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
     RATE_ADDFILE = 1.0 / 10
@@ -37,7 +37,7 @@ GiB=1024*MiB
 TiB=1024*GiB
 PiB=1024*TiB
 
-class Sizes:
+class Sizes(object):
     def __init__(self, mode, file_size, arity=2):
         MAX_SEGSIZE = 128*KiB
         self.mode = mode
@@ -12,7 +12,7 @@ def roundup(size, blocksize=4096):
     return blocksize * mathutil.div_ceil(size, blocksize)
 
 
-class BigFakeString:
+class BigFakeString(object):
     def __init__(self, length):
         self.length = length
         self.fp = 0
@@ -17,7 +17,7 @@ class FileProhibited(Exception):
         self.reason = reason
 
 
-class Blacklist:
+class Blacklist(object):
     def __init__(self, blacklist_fn):
         self.blacklist_fn = blacklist_fn
         self.last_mtime = None
@@ -169,7 +169,7 @@ class CheckAndRepairResults(object):
         return self.post_repair_results
 
 
-class DeepResultsBase:
+class DeepResultsBase(object):
 
     def __init__(self, root_storage_index):
         self.root_storage_index = root_storage_index
@@ -115,7 +115,7 @@ def _make_secret():
     return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + "\n"
 
 
-class SecretHolder:
+class SecretHolder(object):
     def __init__(self, lease_secret, convergence_secret):
         self._lease_secret = lease_secret
         self._convergence_secret = convergence_secret
@@ -129,7 +129,7 @@ class SecretHolder:
     def get_convergence_secret(self):
         return self._convergence_secret
 
-class KeyGenerator:
+class KeyGenerator(object):
     """I create RSA keys for mutable files. Each call to generate() returns a
     single keypair. The keysize is specified first by the keysize= argument
     to generate(), then with a default set by set_default_keysize(), then
@@ -38,7 +38,7 @@ def log_memory_usage(where=""):
                                               where))
 
 @implementer(IConsumer)
-class FileWritingConsumer:
+class FileWritingConsumer(object):
     def __init__(self, filename):
         self.done = False
         self.f = open(filename, "wb")
@@ -143,7 +143,7 @@ class ControlServer(Referenceable, service.Service):
         d.addCallback(_average)
         return d
 
-class SpeedTest:
+class SpeedTest(object):
     def __init__(self, parent, count, size, mutable):
         self.parent = parent
         self.count = count
@@ -111,7 +111,7 @@ def normalize(namex):
 # contents and end by repacking them. It might be better to apply them to
 # the unpacked contents.
 
-class Deleter:
+class Deleter(object):
     def __init__(self, node, namex, must_exist=True, must_be_directory=False, must_be_file=False):
         self.node = node
         self.name = normalize(namex)
@@ -139,7 +139,7 @@ class Deleter:
         return new_contents
 
 
-class MetadataSetter:
+class MetadataSetter(object):
     def __init__(self, node, namex, metadata, create_readonly_node=None):
         self.node = node
         self.name = normalize(namex)
@@ -164,7 +164,7 @@ class MetadataSetter:
         return new_contents
 
 
-class Adder:
+class Adder(object):
     def __init__(self, node, entries=None, overwrite=True, create_readonly_node=None):
         self.node = node
         if entries is None:
@@ -861,7 +861,7 @@ class ManifestWalker(DeepStats):
                 }
 
 
-class DeepChecker:
+class DeepChecker(object):
     def __init__(self, root, verify, repair, add_lease):
         root_si = root.get_storage_index()
         if root_si:
@@ -16,7 +16,7 @@ class NeedRootcapLookupScheme(Exception):
     mechanism to translate name+passwd pairs into a rootcap, either a file of
     name/passwd/rootcap tuples, or a server to do the translation."""
 
-class FTPAvatarID:
+class FTPAvatarID(object):
     def __init__(self, username, rootcap):
         self.username = username
         self.rootcap = rootcap
@@ -951,7 +951,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         return d
 
 
-class StoppableList:
+class StoppableList(object):
     def __init__(self, items):
         self.items = items
     def __iter__(self):
@@ -961,7 +961,7 @@ class StoppableList:
         pass
 
 
-class Reason:
+class Reason(object):
     def __init__(self, value):
         self.value = value
 
@@ -68,7 +68,7 @@ def roundup_pow2(x):
     return ans
 
 
-class CompleteBinaryTreeMixin:
+class CompleteBinaryTreeMixin(object):
     """
     Adds convenience methods to a complete binary tree.
 
@@ -1,7 +1,7 @@
 
 import weakref
 
-class History:
+class History(object):
     """Keep track of recent operations, for a status display."""
 
     name = "history"
@@ -7,7 +7,7 @@ from allmydata.util.dictutil import DictOfSets
 from common import OVERDUE, COMPLETE, CORRUPT, DEAD, BADSEGNUM, \
      BadSegmentNumberError
 
-class SegmentFetcher:
+class SegmentFetcher(object):
     """I am responsible for acquiring blocks for a single segment. I will use
     the Share instances passed to my add_shares() method to locate, retrieve,
     and validate those blocks. I expect my parent node to call my
@@ -20,11 +20,11 @@ def incidentally(res, f, *args, **kwargs):
     f(*args, **kwargs)
     return res
 
-class RequestToken:
+class RequestToken(object):
     def __init__(self, server):
         self.server = server
 
-class ShareFinder:
+class ShareFinder(object):
     OVERDUE_TIMEOUT = 10.0
 
     def __init__(self, storage_broker, verifycap, node, download_status,
@@ -23,16 +23,18 @@ class IDownloadStatusHandlingConsumer(Interface):
     """Record the DownloadStatus 'read event', to be updated with the
     time it takes to decrypt each chunk of data."""
 
-class Cancel:
+class Cancel(object):
     def __init__(self, f):
         self._f = f
         self.active = True
+
     def cancel(self):
         if self.active:
             self.active = False
             self._f(self)
 
-class DownloadNode:
+
+class DownloadNode(object):
     """Internal class which manages downloads and holds state. External
     callers use CiphertextFileNode instead."""
 
@@ -24,7 +24,7 @@ class DataUnavailable(Exception):
     pass
 
 
-class Share:
+class Share(object):
     """I represent a single instance of a single share (e.g. I reference the
     shnum2 for share SI=abcde on server xy12t, not the one on server ab45q).
     I am associated with a CommonShare that remembers data that is held in
@@ -825,7 +825,7 @@ class Share:
             o.notify(state=DEAD, f=f)
 
 
-class CommonShare:
+class CommonShare(object):
     # TODO: defer creation of the hashtree until somebody uses us. There will
     # be a lot of unused shares, and we shouldn't spend the memory on a large
     # hashtree unless necessary.
@@ -3,25 +3,32 @@ import itertools
 from zope.interface import implementer
 from allmydata.interfaces import IDownloadStatus
 
-class ReadEvent:
+class ReadEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
+
     def update(self, bytes, decrypttime, pausetime):
         self._ev["bytes_returned"] += bytes
         self._ev["decrypt_time"] += decrypttime
         self._ev["paused_time"] += pausetime
+
     def finished(self, finishtime):
         self._ev["finish_time"] = finishtime
         self._ds.update_last_timestamp(finishtime)
 
-class SegmentEvent:
+
+class SegmentEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
+
     def activate(self, when):
         if self._ev["active_time"] is None:
             self._ev["active_time"] = when
+
     def deliver(self, when, start, length, decodetime):
         assert self._ev["active_time"] is not None
         self._ev["finish_time"] = when
@@ -30,34 +37,43 @@ class SegmentEvent:
         self._ev["segment_start"] = start
         self._ev["segment_length"] = length
         self._ds.update_last_timestamp(when)
+
     def error(self, when):
         self._ev["finish_time"] = when
         self._ev["success"] = False
         self._ds.update_last_timestamp(when)
 
-class DYHBEvent:
+
+class DYHBEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
+
     def error(self, when):
         self._ev["finish_time"] = when
         self._ev["success"] = False
         self._ds.update_last_timestamp(when)
+
     def finished(self, shnums, when):
         self._ev["finish_time"] = when
         self._ev["success"] = True
         self._ev["response_shnums"] = shnums
         self._ds.update_last_timestamp(when)
 
-class BlockRequestEvent:
+
+class BlockRequestEvent(object):
+
     def __init__(self, ev, ds):
         self._ev = ev
         self._ds = ds
+
     def finished(self, received, when):
         self._ev["finish_time"] = when
         self._ev["success"] = True
         self._ev["response_length"] = received
         self._ds.update_last_timestamp(when)
+
     def error(self, when):
         self._ev["finish_time"] = when
         self._ev["success"] = False
@@ -21,7 +21,7 @@ from allmydata.immutable.downloader.node import DownloadNode, \
     IDownloadStatusHandlingConsumer
 from allmydata.immutable.downloader.status import DownloadStatus
 
-class CiphertextFileNode:
+class CiphertextFileNode(object):
     def __init__(self, verifycap, storage_broker, secret_holder,
                  terminator, history):
         assert isinstance(verifycap, uri.CHKFileVerifierURI)
@@ -16,7 +16,7 @@ class NotEnoughWritersError(Exception):
     pass
 
 
-class CHKCheckerAndUEBFetcher:
+class CHKCheckerAndUEBFetcher(object):
     """I check to see if a file is already present in the grid. I also fetch
     the URI Extension Block, which is useful for an uploading client who
     wants to avoid the work of encryption and encoding.
@@ -244,7 +244,7 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader):
         self._helper.upload_finished(self._storage_index, 0)
         del self._reader
 
-class AskUntilSuccessMixin:
+class AskUntilSuccessMixin(object):
     # create me with a _reader array
     _last_failure = None
 
@@ -1355,7 +1355,7 @@ def read_this_many_bytes(uploadable, size, prepend_data=[]):
     d.addCallback(_got)
     return d
 
-class LiteralUploader:
+class LiteralUploader(object):
 
     def __init__(self, progress=None):
         self._status = s = UploadStatus()
@@ -1477,7 +1477,7 @@ class RemoteEncryptedUploadable(Referenceable):
         return self._eu.close()
 
 
-class AssistedUploader:
+class AssistedUploader(object):
 
     def __init__(self, helper, storage_broker):
         self._helper = helper
@@ -1632,7 +1632,7 @@ class AssistedUploader:
     def get_upload_status(self):
         return self._upload_status
 
-class BaseUploadable:
+class BaseUploadable(object):
     # this is overridden by max_segment_size
     default_max_segment_size = DEFAULT_MAX_SEGMENT_SIZE
     default_params_set = False
@@ -41,7 +41,7 @@ def unsign_from_foolscap(ann_t):
     ann = json.loads(msg.decode("utf-8"))
     return (ann, key_vs)
 
-class SubscriberDescriptor:
+class SubscriberDescriptor(object):
     """This describes a subscriber, for status display purposes. It contains
     the following attributes:
 
@@ -65,7 +65,7 @@ class SubscriberDescriptor:
         self.remote_address = remote_address
         self.tubid = tubid
 
-class AnnouncementDescriptor:
+class AnnouncementDescriptor(object):
     """This describes an announcement, for status display purposes. It
     contains the following attributes, which will be empty ("" for
     strings) if the client did not provide them:
@@ -8,7 +8,7 @@ from allmydata.mutable.common import MODE_CHECK, MODE_WRITE, CorruptShareError
 from allmydata.mutable.servermap import ServerMap, ServermapUpdater
 from allmydata.mutable.retrieve import Retrieve # for verifying
 
-class MutableChecker:
+class MutableChecker(object):
     SERVERMAP_MODE = MODE_CHECK
 
     def __init__(self, node, storage_broker, history, monitor):
@@ -24,7 +24,7 @@ from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer
 from allmydata.mutable.repairer import Repairer
 
 
-class BackoffAgent:
+class BackoffAgent(object):
     # these parameters are copied from foolscap.reconnector, which gets them
     # from twisted.internet.protocol.ReconnectingClientFactory
     initialDelay = 1.0
@@ -1180,7 +1180,7 @@ def _handle_bad_struct(f):
     f.trap(struct.error)
     raise BadShareError(f.value.args[0])
 
-class MDMFSlotReadProxy:
+class MDMFSlotReadProxy(object):
     """
     I read from a mutable slot filled with data written in the MDMF data
     format (which is described above).
@@ -100,7 +100,7 @@ class PublishStatus(object):
 class LoopLimitExceededError(Exception):
     pass
 
-class Publish:
+class Publish(object):
     """I represent a single act of publishing the mutable file to the grid. I
     will only publish my data if the servermap I am using still represents
     the current state of the world.
@@ -24,7 +24,7 @@ class RepairRequiresWritecapError(Exception):
 class MustForceRepairError(Exception):
     pass
 
-class Repairer:
+class Repairer(object):
     def __init__(self, node, check_results, storage_broker, history, monitor):
         self.node = node
         self.check_results = ICheckResults(check_results)
@@ -89,7 +89,7 @@ class RetrieveStatus(object):
         serverid = server.get_serverid()
         self._problems[serverid] = f
 
-class Marker:
+class Marker(object):
     pass
 
 @implementer(IPushProducer)
@@ -80,7 +80,7 @@ class UpdateStatus(object):
     def set_finished(self, when):
         self.finished = when
 
-class ServerMap:
+class ServerMap(object):
     """I record the placement of mutable shares.
 
     This object records which shares (of various versions) are located on
@@ -378,7 +378,7 @@ class ServerMap:
         self.update_data.setdefault(shnum , []).append((verinfo, data))
 
 
-class ServermapUpdater:
+class ServermapUpdater(object):
     def __init__(self, filenode, storage_broker, monitor, servermap,
                  mode=MODE_READ, add_lease=False, update_range=None):
         """I update a servermap, locating a sufficient number of useful
@@ -78,7 +78,7 @@ def get_backupdb(dbfile, stderr=sys.stderr,
         return None
 
 
-class FileResult:
+class FileResult(object):
     def __init__(self, bdb, filecap, should_check,
                  path, mtime, ctime, size):
         self.bdb = bdb
@@ -106,7 +106,7 @@ class FileResult:
         self.bdb.did_check_file_healthy(self.filecap, results)
 
 
-class DirectoryResult:
+class DirectoryResult(object):
     def __init__(self, bdb, dirhash, dircap, should_check):
         self.bdb = bdb
         self.dircap = dircap
@@ -128,7 +128,7 @@ class DirectoryResult:
         self.bdb.did_check_directory_healthy(self.dircap, results)
 
 
-class BackupDB_v2:
+class BackupDB_v2(object):
     VERSION = 2
     NO_CHECK_BEFORE = 1*MONTH
     ALWAYS_CHECK_AFTER = 2*MONTH
@@ -151,7 +151,7 @@ def get_aliases(nodedir):
             pass
     return aliases
 
-class DefaultAliasMarker:
+class DefaultAliasMarker(object):
     pass
 
 pretend_platform_uses_lettercolon = False # for tests
@@ -9,7 +9,7 @@ from allmydata.util.encodingutil import quote_output, is_printable_ascii
 import urllib
 import json
 
-class SlowOperationRunner:
+class SlowOperationRunner(object):
 
     def run(self, options):
         stderr = options.stderr
@@ -8,7 +8,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path
 
-class Checker:
+class Checker(object):
     pass
 
 def _quote_serverid_index_share(serverid, storage_index, sharenum):
@@ -111,7 +111,7 @@ def check(options):
         return errno
     return 0
 
-class FakeTransport:
+class FakeTransport(object):
     disconnecting = False
 
 class DeepCheckOutput(LineOnlyReceiver):
@@ -70,7 +70,7 @@ def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
         raise HTTPError("Error during mkdir", resp)
 
 
-class LocalFileSource:
+class LocalFileSource(object):
     def __init__(self, pathname, basename):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -85,7 +85,7 @@ class LocalFileSource:
     def open(self, caps_only):
         return open(self.pathname, "rb")
 
-class LocalFileTarget:
+class LocalFileTarget(object):
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -93,7 +93,7 @@ class LocalFileTarget:
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)
 
-class LocalMissingTarget:
+class LocalMissingTarget(object):
     def __init__(self, pathname):
         precondition_abspath(pathname)
         self.pathname = pathname
@@ -101,7 +101,7 @@ class LocalMissingTarget:
     def put_file(self, inf):
         fileutil.put_file(self.pathname, inf)
 
-class LocalDirectorySource:
+class LocalDirectorySource(object):
     def __init__(self, progressfunc, pathname, basename):
         precondition_abspath(pathname)
 
@@ -133,7 +133,7 @@ class LocalDirectorySource:
                 # TODO: output a warning
                 pass
 
-class LocalDirectoryTarget:
+class LocalDirectoryTarget(object):
     def __init__(self, progressfunc, pathname):
         precondition_abspath(pathname)
 
@@ -180,7 +180,7 @@ class LocalDirectoryTarget:
         pass
 
 
-class TahoeFileSource:
+class TahoeFileSource(object):
     def __init__(self, nodeurl, mutable, writecap, readcap, basename):
         self.nodeurl = nodeurl
         self.mutable = mutable
@@ -205,7 +205,7 @@ class TahoeFileSource:
     def bestcap(self):
         return self.writecap or self.readcap
 
-class TahoeFileTarget:
+class TahoeFileTarget(object):
     def __init__(self, nodeurl, mutable, writecap, readcap, url):
         self.nodeurl = nodeurl
         self.mutable = mutable
@@ -225,7 +225,7 @@ class TahoeFileTarget:
     # to always create mutable files, or to copy mutable files into new
     # mutable files. ticket #835
 
-class TahoeDirectorySource:
+class TahoeDirectorySource(object):
     def __init__(self, nodeurl, cache, progressfunc, basename):
         self.nodeurl = nodeurl
         self.cache = cache
@@ -298,7 +298,7 @@ class TahoeDirectorySource:
                                  "You probably need to use a later version of "
                                  "Tahoe-LAFS to copy this directory.")
 
-class TahoeMissingTarget:
+class TahoeMissingTarget(object):
     def __init__(self, url):
         self.url = url
 
@@ -315,7 +315,7 @@ class TahoeMissingTarget:
         # I'm not sure this will always work
         return PUT(self.url + "?t=uri", filecap)
 
-class TahoeDirectoryTarget:
+class TahoeDirectoryTarget(object):
     def __init__(self, nodeurl, cache, progressfunc):
         self.nodeurl = nodeurl
         self.cache = cache
@@ -459,7 +459,7 @@ FileTargets = (LocalFileTarget, TahoeFileTarget)
 DirectoryTargets = (LocalDirectoryTarget, TahoeDirectoryTarget)
 MissingTargets = (LocalMissingTarget, TahoeMissingTarget)
 
-class Copier:
+class Copier(object):
 
     def do_copy(self, options, progressfunc=None):
         if options['quiet']:
@@ -9,7 +9,7 @@ from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
 from allmydata.scripts.common_http import do_http, format_http_error
 from allmydata.util.encodingutil import quote_output, quote_path
 
-class FakeTransport:
+class FakeTransport(object):
     disconnecting = False
 
 class ManifestStreamer(LineOnlyReceiver):
@@ -36,7 +36,7 @@ from allmydata.storage.common import UnknownImmutableContainerVersionError, \
 # then the value stored in this field will be the actual share data length
 # modulo 2**32.
 
-class ShareFile:
+class ShareFile(object):
     LEASE_SIZE = struct.calcsize(">L32s32sL")
     sharetype = "immutable"
 
@@ -1,6 +1,6 @@
 import struct, time
 
-class LeaseInfo:
+class LeaseInfo(object):
     def __init__(self, owner_num=None, renew_secret=None, cancel_secret=None,
                  expiration_time=None, nodeid=None):
         self.owner_num = owner_num
@@ -14,9 +14,11 @@ class LeaseInfo:
 
     def get_expiration_time(self):
         return self.expiration_time
+
     def get_grant_renew_time_time(self):
         # hack, based upon fixed 31day expiration period
         return self.expiration_time - 31*24*60*60
+
     def get_age(self):
         return time.time() - self.get_grant_renew_time_time()
 
@@ -27,6 +29,7 @@ class LeaseInfo:
          self.expiration_time) = struct.unpack(">L32s32sL", data)
         self.nodeid = None
         return self
+
     def to_immutable_data(self):
         return struct.pack(">L32s32sL",
                            self.owner_num,
@@ -39,6 +42,7 @@ class LeaseInfo:
                            int(self.expiration_time),
                            self.renew_secret, self.cancel_secret,
                            self.nodeid)
+
     def from_mutable_data(self, data):
         (self.owner_num,
          self.expiration_time,
@@ -35,7 +35,7 @@ from allmydata.mutable.layout import MAX_MUTABLE_SHARE_SIZE
 assert struct.calcsize(">L") == 4, struct.calcsize(">L")
 assert struct.calcsize(">Q") == 8, struct.calcsize(">Q")
 
-class MutableShareFile:
+class MutableShareFile(object):
 
     sharetype = "mutable"
     DATA_LENGTH_OFFSET = struct.calcsize(">32s20s32s")
@@ -443,7 +443,7 @@ def testv_compare(a, op, b):
         return a > b
     # never reached
 
-class EmptyShare:
+class EmptyShare(object):
 
     def check_testv(self, testv):
         test_good = True
@@ -29,11 +29,11 @@ class ContainerNode(object):
     def is_mutable(self):
         return True
 
-class FakeNode:
+class FakeNode(object):
     def raise_error(self):
         return None
 
-class FakeNodeMaker:
+class FakeNodeMaker(object):
     def create_from_cap(self, writecap, readcap=None, deep_immutable=False, name=''):
         return FakeNode()
 
@@ -72,7 +72,7 @@ class GridTesterOptions(usage.Options):
 class CommandFailed(Exception):
     pass
 
-class GridTester:
+class GridTester(object):
     def __init__(self, config):
         self.config = config
         self.tahoe = config.tahoe
@@ -8,7 +8,7 @@ from foolscap.api import Tub, fireEventually
 
 MB = 1000000
 
-class SpeedTest:
+class SpeedTest(object):
     DO_IMMUTABLE = True
     DO_MUTABLE_CREATE = True
     DO_MUTABLE = True
@@ -110,7 +110,7 @@ class Mv(GridTestMixin, CLITestMixin, unittest.TestCase):
         original_do_http = tahoe_mv.do_http
         def mock_do_http(method, url, body=""):
             if method == "DELETE":
-                class FakeResponse:
+                class FakeResponse(object):
                     def read(self):
                         return "response"
                 resp = FakeResponse()
@@ -201,7 +201,7 @@ class DummyProducer(object):
         pass
 
 @implementer(IImmutableFileNode)
-class FakeCHKFileNode:
+class FakeCHKFileNode(object):
     """I provide IImmutableFileNode, but all of my data is stored in a
     class-level dictionary."""
 
@@ -339,7 +339,7 @@ def create_chk_filenode(contents, all_contents):
 
 
 @implementer(IMutableFileNode, ICheckable)
-class FakeMutableFileNode:
+class FakeMutableFileNode(object):
     """I provide IMutableFileNode, but all of my data is stored in a
     class-level dictionary."""
 
@@ -597,7 +597,7 @@ class LoggingServiceParent(service.MultiService):
 
 TEST_DATA="\x02"*(Uploader.URI_LIT_SIZE_THRESHOLD+1)
 
-class ShouldFailMixin:
+class ShouldFailMixin(object):
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
         """Assert that a function call raises some exception. This is a
@@ -638,7 +638,7 @@ class ShouldFailMixin:
         d.addBoth(done)
         return d
 
-class WebErrorMixin:
+class WebErrorMixin(object):
     def explain_web_error(self, f):
         # an error on the server side causes the client-side getPage() to
         # return a failure(t.web.error.Error), and its str() doesn't show the
@@ -83,13 +83,13 @@ def flip_one_bit(s, offset=0, size=None):
     return result
 
 
-class ReallyEqualMixin:
+class ReallyEqualMixin(object):
     def failUnlessReallyEqual(self, a, b, msg=None):
         self.assertEqual(a, b, msg)
         self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
 
 
-class NonASCIIPathMixin:
+class NonASCIIPathMixin(object):
     def mkdir_nonascii(self, dirpath):
         # Kludge to work around the fact that buildbot can't remove a directory tree that has
         # any non-ASCII directory names on Windows. (#1472)
@@ -143,13 +143,13 @@ class SignalMixin(object):
             signal.signal(signal.SIGCHLD, self.sigchldHandler)
         return super(SignalMixin, self).tearDown()
 
-class StallMixin:
+class StallMixin(object):
     def stall(self, res=None, delay=1):
         d = defer.Deferred()
         reactor.callLater(delay, d.callback, res)
         return d
 
-class ShouldFailMixin:
+class ShouldFailMixin(object):
 
     def shouldFail(self, expected_failure, which, substring,
                    callable, *args, **kwargs):
@@ -6,7 +6,7 @@ from twisted.web.error import Error
 from nevow.testutil import FakeRequest
 from nevow import inevow, context
 
-class WebRenderingMixin:
+class WebRenderingMixin(object):
     # d=page.renderString() or s=page.renderSynchronously() will exercise
     # docFactory, render_*/data_* . It won't exercise want_json(), or my
     # renderHTTP() override which tests want_json(). To exercise args=, we
@@ -52,7 +52,7 @@ def eliot_logged_test(f):
     """
     # A convenient, mutable container into which nested functions can write
     # state to be shared among them.
-    class storage:
+    class storage(object):
         pass
 
     @wraps(f)
@@ -19,14 +19,14 @@ from ..no_network import GridTestMixin
 from .. import common_util as testutil
 from ..common_util import DevNullDictionary
 
-class SameKeyGenerator:
+class SameKeyGenerator(object):
     def __init__(self, pubkey, privkey):
         self.pubkey = pubkey
         self.privkey = privkey
     def generate(self, keysize=None):
         return defer.succeed( (self.pubkey, self.privkey) )
 
-class FirstServerGetsKilled:
+class FirstServerGetsKilled(object):
     done = False
     def notify(self, retval, wrapper, methname):
         if not self.done:
@@ -34,7 +34,7 @@ class FirstServerGetsKilled:
             self.done = True
         return retval
 
-class FirstServerGetsDeleted:
+class FirstServerGetsDeleted(object):
     def __init__(self):
         self.done = False
         self.silenced = None
@@ -21,7 +21,7 @@ def eventuaaaaaly(res=None):
 # network connections, both to speed up the tests and to reduce the amount of
 # non-mutable.py code being exercised.
 
-class FakeStorage:
+class FakeStorage(object):
     # this class replaces the collection of storage servers, allowing the
     # tests to examine and manipulate the published shares. It also lets us
     # control the order in which read queries are answered, to exercise more
@@ -78,11 +78,13 @@ class FakeStorage:
         shares[shnum] = f.getvalue()
 
 
-class FakeStorageServer:
+class FakeStorageServer(object):
+
     def __init__(self, peerid, storage):
         self.peerid = peerid
         self.storage = storage
         self.queries = 0
+
     def callRemote(self, methname, *args, **kwargs):
         self.queries += 1
         def _call():
@@ -221,7 +223,7 @@ def make_nodemaker(s=None, num_peers=10, keysize=TEST_RSA_KEY_SIZE):
                           {"k": 3, "n": 10}, SDMF_VERSION, keygen)
     return nodemaker
 
-class PublishMixin:
+class PublishMixin(object):
     def publish_one(self):
         # publish a file and create shares, which can then be manipulated
         # later.
@@ -331,7 +333,7 @@ class PublishMixin:
                 index = versionmap[shnum]
                 shares[peerid][shnum] = oldshares[index][peerid][shnum]
 
-class CheckerMixin:
+class CheckerMixin(object):
     def check_good(self, r, where):
         self.failUnless(r.is_healthy(), where)
         return r
@@ -41,10 +41,10 @@ from .common import (
 class IntentionalError(Exception):
     pass
 
-class Marker:
+class Marker(object):
     pass
 
-class LocalWrapper:
+class LocalWrapper(object):
     def __init__(self, original):
         self.original = original
         self.broken = False
@@ -250,7 +250,7 @@ class _NoNetworkClient(_Client):
         pass
     #._servers will be set by the NoNetworkGrid which creates us
 
-class SimpleStats:
+class SimpleStats(object):
     def __init__(self):
         self.counters = {}
         self.stats_producers = []
@@ -15,7 +15,7 @@ from allmydata.immutable.upload import Data
 from allmydata.test.common_web import WebRenderingMixin
 from allmydata.mutable.publish import MutableData
 
-class FakeClient:
+class FakeClient(object):
     def get_storage_broker(self):
         return self.storage_broker
 
|
@ -1380,7 +1380,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
|
|||||||
self.set_up_grid(oneshare=True)
|
self.set_up_grid(oneshare=True)
|
||||||
return self._do_initial_children_test(mdmf=True)
|
return self._do_initial_children_test(mdmf=True)
|
||||||
|
|
||||||
class MinimalFakeMutableFile:
|
class MinimalFakeMutableFile(object):
|
||||||
def get_writekey(self):
|
def get_writekey(self):
|
||||||
return "writekey"
|
return "writekey"
|
||||||
|
|
||||||
|
@ -1287,11 +1287,12 @@ def make_servers(clientids):
|
|||||||
servers[clientid] = make_server(clientid)
|
servers[clientid] = make_server(clientid)
|
||||||
return servers
|
return servers
|
||||||
|
|
||||||
class MyShare:
|
class MyShare(object):
|
||||||
def __init__(self, shnum, server, rtt):
|
def __init__(self, shnum, server, rtt):
|
||||||
self._shnum = shnum
|
self._shnum = shnum
|
||||||
self._server = server
|
self._server = server
|
||||||
self._dyhb_rtt = rtt
|
self._dyhb_rtt = rtt
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "sh%d-on-%s" % (self._shnum, self._server.get_name())
|
return "sh%d-on-%s" % (self._shnum, self._server.get_name())
|
||||||
|
|
||||||
@@ -1302,21 +1303,26 @@ class MySegmentFetcher(SegmentFetcher):
     def _start_share(self, share, shnum):
         self._test_start_shares.append(share)
 
-class FakeNode:
+class FakeNode(object):
     def __init__(self):
         self.want_more = 0
         self.failed = None
         self.processed = None
         self._si_prefix = "si_prefix"
+
     def want_more_shares(self):
         self.want_more += 1
+
     def fetch_failed(self, fetcher, f):
         self.failed = f
+
     def process_blocks(self, segnum, blocks):
         self.processed = (segnum, blocks)
+
     def get_num_segments(self):
         return 1, True
 
+
 class Selection(unittest.TestCase):
     def test_no_shares(self):
         node = FakeNode()
@@ -8,10 +8,10 @@ from allmydata.mutable.filenode import MutableFileNode
 from allmydata.util import hashutil
 from allmydata.util.consumer import download_to_data
 
-class NotANode:
+class NotANode(object):
     pass
 
-class FakeClient:
+class FakeClient(object):
     # just enough to let the node acquire a downloader (which it won't use),
     # and to get default encoding parameters
     def getServiceNamed(self, name):
@@ -716,7 +716,7 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
         os.makedirs(self.basedir)
         return self.do_system_test()
 
-class FakeRemoteReference:
+class FakeRemoteReference(object):
     def notifyOnDisconnect(self, *args, **kwargs): pass
     def getRemoteTubID(self): return "62ubehyunnyhzs7r6vdonnm2hpi52w6y"
     def getLocationHints(self): return ["tcp:here.example.com:1234",
@@ -86,7 +86,7 @@ WINDOWS_TEST_ADDRESSES = set(["127.0.0.1", "10.0.2.15", "192.168.0.10"])
 CYGWIN_TEST_ADDRESSES = set(["127.0.0.1", "192.168.0.10"])
 
 
-class FakeProcess:
+class FakeProcess(object):
     def __init__(self, output, err):
         self.output = output
         self.err = err
@@ -520,7 +520,7 @@ ENABLE_HELPER = """
 enabled = true
 """
 
-class FakeTub:
+class FakeTub(object):
     def __init__(self):
         self.tubID = base64.b32encode("foo")
         self.listening_ports = []
@@ -19,7 +19,7 @@ MAX_DELTA_READS = 10 * READ_LEEWAY # N = 10
 
 timeout=240 # François's ARM box timed out after 120 seconds of Verifier.test_corrupt_crypttext_hashtree
 
-class RepairTestMixin:
+class RepairTestMixin(object):
     def failUnlessIsInstance(self, x, xtype):
         self.failUnless(isinstance(x, xtype), x)
 
@@ -55,7 +55,9 @@ def get_root_from_file(src):
 srcfile = allmydata.__file__
 rootdir = get_root_from_file(srcfile)
 
-class RunBinTahoeMixin:
+
+class RunBinTahoeMixin(object):
+
     @inlineCallbacks
     def find_import_location(self):
         res = yield self.run_bintahoe(["--version-and-path"])
@@ -1393,7 +1393,7 @@ class Handler(GridTestMixin, ShouldFailMixin, ReallyEqualMixin, unittest.TestCas
         return d
 
     def test_execCommand_and_openShell(self):
-        class MockProtocol:
+        class MockProtocol(object):
             def __init__(self):
                 self.output = ""
                 self.error = ""
@@ -33,9 +33,10 @@ from allmydata.test.common_web import WebRenderingMixin
 from allmydata.test.no_network import NoNetworkServer
 from allmydata.web.storage import StorageStatus, remove_prefix
 
-class Marker:
+class Marker(object):
     pass
-class FakeCanary:
+
+class FakeCanary(object):
     def __init__(self, ignore_disconnectors=False):
         self.ignore = ignore_disconnectors
         self.disconnectors = {}
@@ -50,7 +51,7 @@ class FakeCanary:
             return
         del self.disconnectors[marker]
 
-class FakeStatsProvider:
+class FakeStatsProvider(object):
     def count(self, name, delta=1):
         pass
     def register_producer(self, producer):
@@ -159,7 +160,7 @@ class Bucket(unittest.TestCase):
         result_of_read = br.remote_read(0, len(share_data)+1)
         self.failUnlessEqual(result_of_read, share_data)
 
-class RemoteBucket:
+class RemoteBucket(object):
 
     def __init__(self):
         self.read_count = 0
@@ -3052,8 +3053,9 @@ class InstrumentedLeaseCheckingCrawler(LeaseCheckingCrawler):
         if not self.stop_after_first_bucket:
             self.cpu_slice = 500
 
-class BrokenStatResults:
+class BrokenStatResults(object):
     pass
+
 class No_ST_BLOCKS_LeaseCheckingCrawler(LeaseCheckingCrawler):
     def stat(self, fn):
         s = os.stat(fn)
@@ -86,7 +86,7 @@ class Uploadable(unittest.TestCase):
 class ServerError(Exception):
     pass
 
-class SetDEPMixin:
+class SetDEPMixin(object):
     def set_encoding_parameters(self, k, happy, n, max_segsize=1*MiB):
         p = {"k": k,
              "happy": happy,
@@ -95,7 +95,7 @@ class SetDEPMixin:
              }
         self.node.encoding_params = p
 
-class FakeStorageServer:
+class FakeStorageServer(object):
     def __init__(self, mode, reactor=None):
         self.mode = mode
         self.allocated = []
@@ -162,7 +162,7 @@ class FakeStorageServer:
 
 
 
-class FakeBucketWriter:
+class FakeBucketWriter(object):
     # a diagnostic version of storageserver.BucketWriter
     def __init__(self, size):
         self.data = StringIO()
@@ -856,7 +856,7 @@ def is_happy_enough(servertoshnums, h, k):
             return False
     return True
 
-class FakeServerTracker:
+class FakeServerTracker(object):
     def __init__(self, serverid, buckets):
         self._serverid = serverid
         self.buckets = buckets
@@ -1333,7 +1333,7 @@ class CacheDir(unittest.TestCase):
         del b2
 
 ctr = [0]
-class EqButNotIs:
+class EqButNotIs(object):
     def __init__(self, x):
         self.x = x
         self.hash = ctr[0]
@@ -1615,7 +1615,7 @@ class Log(unittest.TestCase):
         self.flushLoggedErrors(SampleError)
 
 
-class SimpleSpans:
+class SimpleSpans(object):
     # this is a simple+inefficient form of util.spans.Spans . We compare the
     # behavior of this reference model against the real (efficient) form.
 
|
|||||||
assert len(s) >= start+len(data)
|
assert len(s) >= start+len(data)
|
||||||
return s[:start] + data + s[start+len(data):]
|
return s[:start] + data + s[start+len(data):]
|
||||||
|
|
||||||
class SimpleDataSpans:
|
class SimpleDataSpans(object):
|
||||||
def __init__(self, other=None):
|
def __init__(self, other=None):
|
||||||
self.missing = "" # "1" where missing, "0" where found
|
self.missing = "" # "1" where missing, "0" where found
|
||||||
self.data = ""
|
self.data = ""
|
||||||
|
@@ -10,7 +10,7 @@ class FakeRoot(Root):
     def now_fn(self):
         return 0
 
-class FakeContext:
+class FakeContext(object):
     def __init__(self):
         self.slots = {}
         self.tag = self
@@ -57,7 +57,7 @@ from ..status import FakeStatus
 # create a fake uploader/downloader, and a couple of fake dirnodes, then
 # create a webserver that works against them
 
-class FakeStatsProvider:
+class FakeStatsProvider(object):
     def get_stats(self):
         stats = {'stats': {}, 'counters': {}}
         return stats
@@ -179,7 +179,7 @@ def build_one_ds():
 
     return ds
 
-class FakeHistory:
+class FakeHistory(object):
     _all_upload_status = [upload.UploadStatus()]
     _all_download_status = [build_one_ds()]
     _all_mapupdate_statuses = [servermap.UpdateStatus()]
@@ -707,7 +707,7 @@ class ImmutableDirectoryURIVerifier(DirectoryURIVerifier):
     INNER_URI_CLASS=CHKFileVerifierURI
 
 
-class UnknownURI:
+class UnknownURI(object):
     def __init__(self, uri, error=None):
         self._uri = uri
         self._error = error
@@ -35,7 +35,7 @@ class CacheDirectoryManager(service.MultiService):
             if now - mtime > self.old:
                 os.remove(absfn)
 
-class CacheFile:
+class CacheFile(object):
     def __init__(self, absfn):
         self.filename = absfn
 
@@ -102,7 +102,7 @@ def eventual_chain(source, target):
     source.addCallbacks(eventually_callback(target), eventually_errback(target))
 
 
-class HookMixin:
+class HookMixin(object):
     """
     I am a helper mixin that maintains a collection of named hooks, primarily
     for use in tests. Each hook is set to an unfired Deferred using 'set_hook',
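For context: a minimal sketch of the named-hook mechanism the docstring describes, assuming only what the docstring states (set_hook returns an unfired Deferred; the code under test later fires it). This is illustrative, not Tahoe's implementation:

    from twisted.internet import defer

    class TinyHooks(object):
        def __init__(self):
            self._hooks = {}

        def set_hook(self, name):
            d = defer.Deferred()          # stays unfired until triggered
            self._hooks[name] = d
            return d

        def _call_hook(self, result, name):
            d = self._hooks.pop(name, None)
            if d is not None:
                d.callback(result)        # wakes whoever waited on set_hook
            return result

A test would typically do d = obj.set_hook("uploaded"), kick off the operation under test, and return d so the test framework waits for the hook to fire.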
@@ -75,7 +75,7 @@ def remove(f, tries=4, basedelay=0.1):
         basedelay *= 2
     return os.remove(f) # The last try.
 
-class ReopenableNamedTemporaryFile:
+class ReopenableNamedTemporaryFile(object):
     """
     This uses tempfile.mkstemp() to generate a secure temp file. It then closes
     the file, leaving a zero-length file as a placeholder. You can get the
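For context: the mechanism in the docstring, reduced to its essentials (a sketch; the real class adds naming and shutdown handling):

    import os, tempfile

    fd, name = tempfile.mkstemp()    # securely created temp file
    os.close(fd)                     # leaves a zero-length placeholder
    assert os.path.getsize(name) == 0
    f = open(name, "wb")             # reopen by name whenever needed
    f.write("data")
    f.close()
    os.remove(name)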
@@ -99,7 +99,7 @@ class ReopenableNamedTemporaryFile:
     def shutdown(self):
         remove(self.name)
 
-class EncryptedTemporaryFile:
+class EncryptedTemporaryFile(object):
     # not implemented: next, readline, readlines, xreadlines, writelines
 
     def __init__(self):
@@ -14,7 +14,7 @@ from allmydata.util.netstring import netstring
 # kinds.
 CRYPTO_VAL_SIZE=32
 
-class _SHA256d_Hasher:
+class _SHA256d_Hasher(object):
     # use SHA-256d, as defined by Ferguson and Schneier: hash the output
     # again to prevent length-extension attacks
     def __init__(self, truncate_to=None):
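For context: SHA-256d, as the comment describes it, is simply SHA-256 applied twice; because the outer hash's input is a fixed-size digest rather than attacker-extensible data, length-extension attacks on the result do not work. A functional sketch (the truncate_to argument mirrors the constructor shown above; the helper name is illustrative):

    import hashlib

    def sha256d(data, truncate_to=None):
        h = hashlib.sha256(hashlib.sha256(data).digest()).digest()
        return h[:truncate_to] if truncate_to else h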
@@ -2,7 +2,7 @@
 from twisted.internet import defer
 from foolscap.api import eventually
 
-class ConcurrencyLimiter:
+class ConcurrencyLimiter(object):
     """I implement a basic concurrency limiter. Add work to it in the form of
     (callable, args, kwargs) tuples. No more than LIMIT callables will be
     outstanding at any one time.
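For context: a sketch of the contract the docstring states — queue (callable, args, kwargs) work and keep at most a fixed number outstanding. The class below is illustrative, not the real implementation:

    from twisted.internet import defer

    class TinyLimiter(object):
        def __init__(self, limit=10):
            self.limit = limit
            self.active = 0
            self.pending = []              # (cb, args, kwargs, Deferred)

        def add(self, cb, *args, **kwargs):
            d = defer.Deferred()
            self.pending.append((cb, args, kwargs, d))
            self._maybe_start()
            return d

        def _maybe_start(self):
            while self.pending and self.active < self.limit:
                cb, args, kwargs, d = self.pending.pop(0)
                self.active += 1
                d2 = defer.maybeDeferred(cb, *args, **kwargs)
                d2.addBoth(self._one_done)  # free the slot first
                d2.chainDeferred(d)         # then report to the caller

        def _one_done(self, res):
            self.active -= 1
            self._maybe_start()
            return res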
@@ -77,7 +77,7 @@ class LazyOneShotObserverList(OneShotObserverList):
         if self._watchers: # if not, don't call result_producer
             self._fire(self._get_result())
 
-class ObserverList:
+class ObserverList(object):
     """A simple class to distribute events to a number of subscribers."""
 
     def __init__(self):
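For context: the docstring names the classic observer pattern, and the next hunk shows the delivery loop (eventually() pushes each call to a later reactor turn). A stripped-down sketch for orientation; names are illustrative and the synchronous delivery is a simplification:

    class TinyObserverList(object):
        def __init__(self):
            self._watchers = []

        def subscribe(self, observer):
            self._watchers.append(observer)

        def unsubscribe(self, observer):
            self._watchers.remove(observer)

        def notify(self, *args, **kwargs):
            for o in self._watchers:
                o(*args, **kwargs)   # real version defers via eventually()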
@@ -93,7 +93,7 @@ class ObserverList:
         for o in self._watchers:
             eventually(o, *args, **kwargs)
 
-class EventStreamObserver:
+class EventStreamObserver(object):
     """A simple class to distribute multiple events to a single subscriber.
     It accepts arbitrary kwargs, but no posargs."""
     def __init__(self):
@@ -66,7 +66,7 @@ class ExpandableDeferredList(defer.Deferred):
         return f
 
 
-class Pipeline:
+class Pipeline(object):
     """I manage a size-limited pipeline of Deferred operations, usually
     callRemote() messages."""
 
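For context: a hedged sketch of what a "size-limited pipeline of Deferred operations" can look like — track how much work is in flight and make callers wait once capacity is exceeded. The add() signature and internals here are assumptions for illustration, not the real API:

    from twisted.internet import defer

    class TinyPipeline(object):
        def __init__(self, capacity):
            self.capacity = capacity   # max in-flight "size" (e.g. bytes)
            self.gauge = 0
            self.waiting = []          # callers blocked on free capacity

        def add(self, size, cb, *args):
            self.gauge += size
            d = defer.maybeDeferred(cb, *args)
            d.addBoth(self._finished, size)
            if self.gauge < self.capacity:
                return defer.succeed(None)   # room left: don't block
            w = defer.Deferred()             # over capacity: caller waits
            self.waiting.append(w)
            return w

        def _finished(self, res, size):
            self.gauge -= size
            while self.waiting and self.gauge < self.capacity:
                self.waiting.pop(0).callback(None)
            return res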
@@ -9,7 +9,7 @@ class TimeoutError(Exception):
 class PollComplete(Exception):
     pass
 
-class PollMixin:
+class PollMixin(object):
     _poll_should_ignore_these_errors = []
 
     def poll(self, check_f, pollinterval=0.01, timeout=1000):
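For context: the poll() signature above tells the story — retry check_f until it returns true, or give up after a timeout. A blocking sketch of the same contract (the real mixin is Deferred-based and non-blocking):

    import time

    def poll_blocking(check_f, pollinterval=0.01, timeout=1000):
        deadline = time.time() + timeout
        while not check_f():
            if time.time() >= deadline:
                raise Exception("poll timed out")  # stands in for TimeoutError
            time.sleep(pollinterval)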
@@ -1,7 +1,7 @@
 from __future__ import print_function
 
 
-class Spans:
+class Spans(object):
     """I represent a compressed list of booleans, one per index (an integer).
     Typically, each index represents an offset into a large string, pointing
     to a specific byte of a share. In this context, True means that byte has
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
class DataSpans:
|
class DataSpans(object):
|
||||||
"""I represent portions of a large string. Equivalently, I can be said to
|
"""I represent portions of a large string. Equivalently, I can be said to
|
||||||
maintain a large array of characters (with gaps of empty elements). I can
|
maintain a large array of characters (with gaps of empty elements). I can
|
||||||
be used to manage access to a remote share, where some pieces have been
|
be used to manage access to a remote share, where some pieces have been
|
||||||
|
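For context: DataSpans adds payload to the picture — not just which ranges are present, but the bytes themselves, with gaps allowed. A deliberately naive sketch of the same contract (one dict entry per byte; the real class stores merged chunks):

    class NaiveDataSpans(object):
        def __init__(self):
            self.bytes = {}                  # offset -> one-byte string

        def add(self, offset, data):
            for i, c in enumerate(data):
                self.bytes[offset + i] = c

        def get(self, offset, length):
            try:
                return "".join(self.bytes[offset + i]
                               for i in range(length))
            except KeyError:
                return None                  # gap: a piece is still missing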
@@ -63,7 +63,7 @@ def json_check_and_repair_results(r):
     data["post-repair-results"] = json_check_results(post)
     return data
 
-class ResultsBase:
+class ResultsBase(object):
     # self.client must point to the Client, so we can get nicknames and
     # determine the permuted peer order
 
@@ -205,7 +205,7 @@ class LiteralCheckResultsRenderer(rend.Page, ResultsBase):
             return T.div[T.a(href=return_to)["Return to file."]]
         return ""
 
-class CheckerBase:
+class CheckerBase(object):
 
     def renderHTTP(self, ctx):
         if self.want_json(ctx):
@@ -371,7 +371,7 @@ class NeedOperationHandleError(WebError):
     pass
 
 
-class RenderMixin:
+class RenderMixin(object):
 
     def renderHTTP(self, ctx):
         request = IRequest(ctx)
@@ -23,7 +23,7 @@ from allmydata.web.check_results import CheckResultsRenderer, \
     CheckAndRepairResultsRenderer, LiteralCheckResultsRenderer
 from allmydata.web.info import MoreInfo
 
-class ReplaceMeMixin:
+class ReplaceMeMixin(object):
     def replace_me_with_a_child(self, req, client, replace):
         # a new file is being uploaded in our place.
         file_format = get_format(req, "CHK")
@@ -120,7 +120,7 @@ class OphandleTable(rend.Page, service.Service):
         self.timers.pop(ophandle, None)
         self.handles.pop(ophandle, None)
 
-class ReloadMixin:
+class ReloadMixin(object):
     REFRESH_TIME = 1*MINUTE
 
     def render_refresh(self, ctx, data):
@@ -17,7 +17,7 @@ from allmydata.web.common import (
 from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
     IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus
 
-class RateAndTimeMixin:
+class RateAndTimeMixin(object):
 
     def render_time(self, ctx, data):
         return abbreviate_time(data)
@@ -117,7 +117,7 @@ def initialize():
         use_last_error=True
     )(("WriteConsoleW", windll.kernel32))
 
-class UnicodeOutput:
+class UnicodeOutput(object):
     def __init__(self, hConsole, stream, fileno, name):
         self._hConsole = hConsole
         self._stream = stream