diff --git a/src/allmydata/blacklist.py b/src/allmydata/blacklist.py index f0af41df4..aebede6af 100644 --- a/src/allmydata/blacklist.py +++ b/src/allmydata/blacklist.py @@ -1,7 +1,7 @@ import os -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.python import log as twisted_log @@ -56,8 +56,8 @@ class Blacklist: return reason -class ProhibitedNode: - implements(IFileNode) +@implementer(IFileNode) +class ProhibitedNode(object): def __init__(self, wrapped_node, reason): assert IFilesystemNode.providedBy(wrapped_node), wrapped_node diff --git a/src/allmydata/check_results.py b/src/allmydata/check_results.py index 4071de6a0..df6887a4e 100644 --- a/src/allmydata/check_results.py +++ b/src/allmydata/check_results.py @@ -1,11 +1,11 @@ -from zope.interface import implements +from zope.interface import implementer from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \ IDeepCheckResults, IDeepCheckAndRepairResults, IURI, IDisplayableServer from allmydata.util import base32 -class CheckResults: - implements(ICheckResults) +@implementer(ICheckResults) +class CheckResults(object): def __init__(self, uri, storage_index, healthy, recoverable, count_happiness, @@ -146,8 +146,8 @@ class CheckResults: def get_servermap(self): return self._servermap -class CheckAndRepairResults: - implements(ICheckAndRepairResults) +@implementer(ICheckAndRepairResults) +class CheckAndRepairResults(object): def __init__(self, storage_index): self.storage_index = storage_index @@ -206,8 +206,8 @@ class DeepResultsBase: return self.stats +@implementer(IDeepCheckResults) class DeepCheckResults(DeepResultsBase): - implements(IDeepCheckResults) def add_check(self, r, path): if not r: @@ -234,8 +234,8 @@ class DeepCheckResults(DeepResultsBase): } +@implementer(IDeepCheckAndRepairResults) class DeepCheckAndRepairResults(DeepResultsBase): - implements(IDeepCheckAndRepairResults) def __init__(self, root_storage_index): DeepResultsBase.__init__(self, root_storage_index) diff --git a/src/allmydata/client.py b/src/allmydata/client.py index 80332b408..0e7456bab 100644 --- a/src/allmydata/client.py +++ b/src/allmydata/client.py @@ -2,7 +2,7 @@ import os, stat, time, weakref from allmydata import node from base64 import urlsafe_b64encode -from zope.interface import implements +from zope.interface import implementer from twisted.internet import reactor, defer from twisted.application import service from twisted.application.internet import TimerService @@ -153,8 +153,8 @@ class Terminator(service.Service): return service.Service.stopService(self) +@implementer(IStatsProducer) class Client(node.Node, pollmixin.PollMixin): - implements(IStatsProducer) PORTNUMFILE = "client.port" STOREDIR = 'storage' diff --git a/src/allmydata/codec.py b/src/allmydata/codec.py index 25e600227..1ceb146e4 100644 --- a/src/allmydata/codec.py +++ b/src/allmydata/codec.py @@ -1,14 +1,14 @@ # -*- test-case-name: allmydata.test.test_encode_share -*- -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from allmydata.util import mathutil from allmydata.util.assertutil import precondition from allmydata.interfaces import ICodecEncoder, ICodecDecoder import zfec +@implementer(ICodecEncoder) class CRSEncoder(object): - implements(ICodecEncoder) ENCODER_TYPE = "crs" def set_params(self, data_size, required_shares, max_shares): @@ -45,8 +45,8 @@ class CRSEncoder(object): return defer.succeed((shares, desired_share_ids)) 
+@implementer(ICodecDecoder) class CRSDecoder(object): - implements(ICodecDecoder) def set_params(self, data_size, required_shares, max_shares): self.data_size = data_size diff --git a/src/allmydata/control.py b/src/allmydata/control.py index 7bd8eedab..69ac9a62e 100644 --- a/src/allmydata/control.py +++ b/src/allmydata/control.py @@ -1,6 +1,6 @@ import os, time, tempfile -from zope.interface import implements +from zope.interface import implementer from twisted.application import service from twisted.internet import defer from twisted.internet.interfaces import IConsumer @@ -37,8 +37,8 @@ def log_memory_usage(where=""): stats["VmPeak"], where)) +@implementer(IConsumer) class FileWritingConsumer: - implements(IConsumer) def __init__(self, filename): self.done = False self.f = open(filename, "wb") @@ -54,8 +54,8 @@ class FileWritingConsumer: self.done = True self.f.close() +@implementer(RIControlClient) class ControlServer(Referenceable, service.Service): - implements(RIControlClient) def remote_wait_for_client_connections(self, num_clients): return self.parent.debug_wait_for_client_connections(num_clients) @@ -245,8 +245,8 @@ class SpeedTest: os.unlink(fn) return res -class DiscardingConsumer: - implements(IConsumer) +@implementer(IConsumer) +class DiscardingConsumer(object): def __init__(self): self.done = False def registerProducer(self, p, streaming): diff --git a/src/allmydata/dirnode.py b/src/allmydata/dirnode.py index 620c7e398..f5f4e2073 100644 --- a/src/allmydata/dirnode.py +++ b/src/allmydata/dirnode.py @@ -1,7 +1,7 @@ """Directory Node implementation.""" import time, unicodedata -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from foolscap.api import fireEventually import simplejson @@ -256,8 +256,8 @@ def _pack_normalized_children(children, writekey, deep_immutable=False): entries.append(netstring(entry)) return "".join(entries) -class DirectoryNode: - implements(IDirectoryNode, ICheckable, IDeepCheckable) +@implementer(IDirectoryNode, ICheckable, IDeepCheckable) +class DirectoryNode(object): filenode_class = MutableFileNode def __init__(self, filenode, nodemaker, uploader): diff --git a/src/allmydata/frontends/auth.py b/src/allmydata/frontends/auth.py index 712a888a0..2c72a020e 100644 --- a/src/allmydata/frontends/auth.py +++ b/src/allmydata/frontends/auth.py @@ -1,6 +1,6 @@ import os -from zope.interface import implements +from zope.interface import implementer from twisted.web.client import getPage from twisted.internet import defer from twisted.cred import error, checkers, credentials @@ -21,8 +21,8 @@ class FTPAvatarID: self.username = username self.rootcap = rootcap -class AccountFileChecker: - implements(checkers.ICredentialsChecker) +@implementer(checkers.ICredentialsChecker) +class AccountFileChecker(object): credentialInterfaces = (credentials.IUsernamePassword, credentials.IUsernameHashedPassword, credentials.ISSHPrivateKey) @@ -108,8 +108,8 @@ class AccountFileChecker: return defer.fail(error.UnauthorizedLogin()) -class AccountURLChecker: - implements(checkers.ICredentialsChecker) +@implementer(checkers.ICredentialsChecker) +class AccountURLChecker(object): credentialInterfaces = (credentials.IUsernamePassword,) def __init__(self, client, auth_url): diff --git a/src/allmydata/frontends/ftpd.py b/src/allmydata/frontends/ftpd.py index 9791b813a..18f6ff423 100644 --- a/src/allmydata/frontends/ftpd.py +++ b/src/allmydata/frontends/ftpd.py @@ -1,7 +1,7 @@ from types import NoneType -from zope.interface 
import implements +from zope.interface import implementer from twisted.application import service, strports from twisted.internet import defer from twisted.internet.interfaces import IConsumer @@ -15,16 +15,16 @@ from allmydata.immutable.upload import FileHandle from allmydata.util.fileutil import EncryptedTemporaryFile from allmydata.util.assertutil import precondition -class ReadFile: - implements(ftp.IReadFile) +@implementer(ftp.IReadFile) +class ReadFile(object): def __init__(self, node): self.node = node def send(self, consumer): d = self.node.read(consumer) return d # when consumed -class FileWriter: - implements(IConsumer) +@implementer(IConsumer) +class FileWriter(object): def registerProducer(self, producer, streaming): if not streaming: @@ -41,8 +41,8 @@ class FileWriter: def write(self, data): self.f.write(data) -class WriteFile: - implements(ftp.IWriteFile) +@implementer(ftp.IWriteFile) +class WriteFile(object): def __init__(self, parent, childname, convergence): self.parent = parent @@ -73,8 +73,8 @@ class IntishPermissions(filepath.Permissions): def __and__(self, other): return self._tahoe_statModeInt & other -class Handler: - implements(ftp.IFTPShell) +@implementer(ftp.IFTPShell) +class Handler(object): def __init__(self, client, rootnode, username, convergence): self.client = client self.root = rootnode @@ -292,8 +292,8 @@ class Handler: from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme -class Dispatcher: - implements(portal.IRealm) +@implementer(portal.IRealm) +class Dispatcher(object): def __init__(self, client): self.client = client diff --git a/src/allmydata/frontends/sftpd.py b/src/allmydata/frontends/sftpd.py index f4a39cb9b..028ea7ee5 100644 --- a/src/allmydata/frontends/sftpd.py +++ b/src/allmydata/frontends/sftpd.py @@ -4,7 +4,7 @@ from types import NoneType from stat import S_IFREG, S_IFDIR from time import time, strftime, localtime -from zope.interface import implements +from zope.interface import implementer from twisted.python import components from twisted.application import service, strports from twisted.conch.ssh import factory, keys, session @@ -294,8 +294,8 @@ def _direntry_for(filenode_or_parent, childname, filenode=None): return None +@implementer(IConsumer) class OverwriteableFileConsumer(PrefixingLogMixin): - implements(IConsumer) """I act both as a consumer for the download of the original file contents, and as a wrapper for a temporary file that records the downloaded data and any overwrites. I use a priority queue to keep track of which regions of the file have been overwritten @@ -566,8 +566,8 @@ class OverwriteableFileConsumer(PrefixingLogMixin): SIZE_THRESHOLD = 1000 +@implementer(ISFTPFile) class ShortReadOnlySFTPFile(PrefixingLogMixin): - implements(ISFTPFile) """I represent a file handle to a particular file on an SFTP connection. I am used only for short immutable files opened in read-only mode. When I am created, the file contents start to be downloaded to memory. @@ -644,8 +644,8 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin): return defer.execute(_denied) +@implementer(ISFTPFile) class GeneralSFTPFile(PrefixingLogMixin): - implements(ISFTPFile) """I represent a file handle to a particular file on an SFTP connection. I wrap an instance of OverwriteableFileConsumer, which is responsible for storing the file contents. 
In order to allow write requests to be satisfied @@ -993,8 +993,8 @@ def _reload(): global all_heisenfiles all_heisenfiles = {} +@implementer(ISFTPServer) class SFTPUserHandler(ConchUser, PrefixingLogMixin): - implements(ISFTPServer) def __init__(self, client, rootnode, username): ConchUser.__init__(self) PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=username) @@ -1894,8 +1894,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin): return d -class FakeTransport: - implements(ITransport) +@implementer(ITransport) +class FakeTransport(object): def write(self, data): logmsg("FakeTransport.write(<data of length %r>)" % (len(data),), level=NOISY) @@ -1908,8 +1908,8 @@ class FakeTransport: # getPeer and getHost can just raise errors, since we don't know what to return +@implementer(ISession) class ShellSession(PrefixingLogMixin): - implements(ISession) def __init__(self, userHandler): PrefixingLogMixin.__init__(self, facility="tahoe.sftp") if noisy: self.log(".__init__(%r)" % (userHandler), level=NOISY) @@ -1965,8 +1965,8 @@ components.registerAdapter(ShellSession, SFTPUserHandler, ISession) from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme -class Dispatcher: - implements(portal.IRealm) +@implementer(portal.IRealm) +class Dispatcher(object): def __init__(self, client): self._client = client diff --git a/src/allmydata/immutable/checker.py b/src/allmydata/immutable/checker.py index d931a15be..317d4af90 100644 --- a/src/allmydata/immutable/checker.py +++ b/src/allmydata/immutable/checker.py @@ -1,4 +1,4 @@ -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from foolscap.api import DeadReferenceError, RemoteException from allmydata import hashtree, codec, uri @@ -27,8 +27,8 @@ class BadOrMissingHash(IntegrityCheckReject): class UnsupportedErasureCodec(BadURIExtension): pass -class ValidatedExtendedURIProxy: - implements(IValidatedThingProxy) +@implementer(IValidatedThingProxy) +class ValidatedExtendedURIProxy(object): """ I am a front-end for a remote UEB (using a local ReadBucketProxy), responsible for retrieving and validating the elements from the UEB.""" diff --git a/src/allmydata/immutable/downloader/segmentation.py b/src/allmydata/immutable/downloader/segmentation.py index 07307c367..0acf09d9b 100644 --- a/src/allmydata/immutable/downloader/segmentation.py +++ b/src/allmydata/immutable/downloader/segmentation.py @@ -1,7 +1,7 @@ import time now = time.time -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.internet.interfaces import IPushProducer from foolscap.api import eventually @@ -11,14 +11,14 @@ from allmydata.interfaces import DownloadStopped from common import BadSegmentNumberError, WrongSegmentError -class Segmentation: +@implementer(IPushProducer) +class Segmentation(object): """I am responsible for a single offset+size read of the file. I handle segmentation: I figure out which segments are necessary, request them (from my CiphertextDownloader) in order, and trim the segments down to match the offset+size span. I use the Producer/Consumer interface to only request one segment at a time.
""" - implements(IPushProducer) def __init__(self, node, offset, size, consumer, read_ev, logparent=None): self._node = node self._hungry = True diff --git a/src/allmydata/immutable/downloader/status.py b/src/allmydata/immutable/downloader/status.py index e9174b6bc..74e5fc7b6 100644 --- a/src/allmydata/immutable/downloader/status.py +++ b/src/allmydata/immutable/downloader/status.py @@ -1,6 +1,6 @@ import itertools -from zope.interface import implements +from zope.interface import implementer from allmydata.interfaces import IDownloadStatus class ReadEvent: @@ -64,10 +64,10 @@ class BlockRequestEvent: self._ds.update_last_timestamp(when) -class DownloadStatus: +@implementer(IDownloadStatus) +class DownloadStatus(object): # There is one DownloadStatus for each CiphertextFileNode. The status # object will keep track of all activity for that node. - implements(IDownloadStatus) statusid_counter = itertools.count(0) def __init__(self, storage_index, size): diff --git a/src/allmydata/immutable/encode.py b/src/allmydata/immutable/encode.py index 8cc32d35a..efc7ac3f6 100644 --- a/src/allmydata/immutable/encode.py +++ b/src/allmydata/immutable/encode.py @@ -1,7 +1,7 @@ # -*- test-case-name: allmydata.test.test_encode -*- import time -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from foolscap.api import fireEventually from allmydata import uri @@ -71,8 +71,8 @@ GiB=1024*MiB TiB=1024*GiB PiB=1024*TiB +@implementer(IEncoder) class Encoder(object): - implements(IEncoder) def __init__(self, log_parent=None, upload_status=None, progress=None): object.__init__(self) diff --git a/src/allmydata/immutable/filenode.py b/src/allmydata/immutable/filenode.py index e00e69c98..ff15843a8 100644 --- a/src/allmydata/immutable/filenode.py +++ b/src/allmydata/immutable/filenode.py @@ -2,7 +2,7 @@ import binascii from time import time as now -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from allmydata import uri @@ -182,12 +182,12 @@ class CiphertextFileNode: monitor=monitor) return v.start() -class DecryptingConsumer: +@implementer(IConsumer, IDownloadStatusHandlingConsumer) +class DecryptingConsumer(object): """I sit between a CiphertextDownloader (which acts as a Producer) and the real Consumer, decrypting everything that passes by. 
The real Consumer sees the real Producer, but the Producer sees us instead of the real consumer.""" - implements(IConsumer, IDownloadStatusHandlingConsumer) def __init__(self, consumer, readkey, offset): self._consumer = consumer @@ -227,8 +227,8 @@ class DecryptingConsumer: self._download_status.add_misc_event("AES", started, now()) self._consumer.write(plaintext) -class ImmutableFileNode: - implements(IImmutableFileNode) +@implementer(IImmutableFileNode) +class ImmutableFileNode(object): # I wrap a CiphertextFileNode with a decryption key def __init__(self, filecap, storage_broker, secret_holder, terminator, diff --git a/src/allmydata/immutable/layout.py b/src/allmydata/immutable/layout.py index 8d9a8f035..c634702ab 100644 --- a/src/allmydata/immutable/layout.py +++ b/src/allmydata/immutable/layout.py @@ -1,5 +1,5 @@ import struct -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from allmydata.interfaces import IStorageBucketWriter, IStorageBucketReader, \ FileTooLargeError, HASH_SIZE @@ -94,8 +94,8 @@ def make_write_bucket_proxy(rref, server, num_share_hashes, uri_extension_size_max) return wbp -class WriteBucketProxy: - implements(IStorageBucketWriter) +@implementer(IStorageBucketWriter) +class WriteBucketProxy(object): fieldsize = 4 fieldstruct = ">L" @@ -289,8 +289,8 @@ class WriteBucketProxy_v2(WriteBucketProxy): assert len(offset_data) == 0x44, len(offset_data) self._offset_data = offset_data -class ReadBucketProxy: - implements(IStorageBucketReader) +@implementer(IStorageBucketReader) +class ReadBucketProxy(object): MAX_UEB_SIZE = 2000 # actual size is closer to 419, but varies by a few bytes diff --git a/src/allmydata/immutable/literal.py b/src/allmydata/immutable/literal.py index b18dccb6c..d00aedec0 100644 --- a/src/allmydata/immutable/literal.py +++ b/src/allmydata/immutable/literal.py @@ -1,13 +1,13 @@ from cStringIO import StringIO -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.internet.interfaces import IPushProducer from twisted.protocols import basic from allmydata.interfaces import IImmutableFileNode, ICheckable from allmydata.uri import LiteralFileURI +@implementer(IImmutableFileNode, ICheckable) class _ImmutableFileNodeBase(object): - implements(IImmutableFileNode, ICheckable) def get_write_uri(self): return None @@ -44,8 +44,8 @@ class _ImmutableFileNodeBase(object): return True -class LiteralProducer: - implements(IPushProducer) +@implementer(IPushProducer) +class LiteralProducer(object): def pauseProducing(self): pass diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py index 937dceae8..5b9e5803b 100644 --- a/src/allmydata/immutable/offloaded.py +++ b/src/allmydata/immutable/offloaded.py @@ -1,6 +1,6 @@ import os, stat, time, weakref -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from foolscap.api import Referenceable, DeadReferenceError, eventually import allmydata # for __full_version__ @@ -123,12 +123,12 @@ class CHKCheckerAndUEBFetcher: return False +@implementer(interfaces.RICHKUploadHelper) class CHKUploadHelper(Referenceable, upload.CHKUploader): """I am the helper-server -side counterpart to AssistedUploader. I handle peer selection, encoding, and share pushing. I read ciphertext from the remote AssistedUploader. 
""" - implements(interfaces.RICHKUploadHelper) VERSION = { "http://allmydata.org/tahoe/protocols/helper/chk-upload/v1" : { }, "application-version": str(allmydata.__full_version__), @@ -447,8 +447,8 @@ class CHKCiphertextFetcher(AskUntilSuccessMixin): return self._ciphertext_fetched +@implementer(interfaces.IEncryptedUploadable) class LocalCiphertextReader(AskUntilSuccessMixin): - implements(interfaces.IEncryptedUploadable) def __init__(self, upload_helper, storage_index, encoding_file): self._readers = [] @@ -484,8 +484,8 @@ class LocalCiphertextReader(AskUntilSuccessMixin): +@implementer(interfaces.RIHelper, interfaces.IStatsProducer) class Helper(Referenceable): - implements(interfaces.RIHelper, interfaces.IStatsProducer) # this is the non-distributed version. When we need to have multiple # helpers, this object will become the HelperCoordinator, and will query # the farm of Helpers to see if anyone has the storage_index of interest, diff --git a/src/allmydata/immutable/repairer.py b/src/allmydata/immutable/repairer.py index 320dbd8c4..97fc9df1b 100644 --- a/src/allmydata/immutable/repairer.py +++ b/src/allmydata/immutable/repairer.py @@ -1,4 +1,4 @@ -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from allmydata.storage.server import si_b2a from allmydata.util import log, consumer @@ -7,8 +7,8 @@ from allmydata.interfaces import IEncryptedUploadable from allmydata.immutable import upload +@implementer(IEncryptedUploadable) class Repairer(log.PrefixingLogMixin): - implements(IEncryptedUploadable) """I generate any shares which were not available and upload them to servers. diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py index d7a7365a6..6f5224942 100644 --- a/src/allmydata/immutable/upload.py +++ b/src/allmydata/immutable/upload.py @@ -1,5 +1,5 @@ import os, time, weakref, itertools -from zope.interface import implements +from zope.interface import implementer from twisted.python import failure from twisted.internet import defer from twisted.application import service @@ -57,8 +57,8 @@ class HelperUploadResults(Copyable, RemoteCopy): self.preexisting_shares = None # count of shares already present self.pushed_shares = None # count of shares we pushed -class UploadResults: - implements(IUploadResults) +@implementer(IUploadResults) +class UploadResults(object): def __init__(self, file_size, ciphertext_fetched, # how much the helper fetched @@ -617,10 +617,10 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin): raise UploadUnhappinessError(msg) -class EncryptAnUploadable: +@implementer(IEncryptedUploadable) +class EncryptAnUploadable(object): """This is a wrapper that takes an IUploadable and provides IEncryptedUploadable.""" - implements(IEncryptedUploadable) CHUNKSIZE = 50*1024 def __init__(self, original, log_parent=None, progress=None): @@ -844,8 +844,8 @@ class EncryptAnUploadable: def close(self): return self.original.close() -class UploadStatus: - implements(IUploadStatus) +@implementer(IUploadStatus) +class UploadStatus(object): statusid_counter = itertools.count(0) def __init__(self): @@ -1129,8 +1129,8 @@ class LiteralUploader: def get_upload_status(self): return self._status +@implementer(RIEncryptedUploadable) class RemoteEncryptedUploadable(Referenceable): - implements(RIEncryptedUploadable) def __init__(self, encrypted_uploadable, upload_status): self._eu = IEncryptedUploadable(encrypted_uploadable) @@ -1407,8 +1407,8 @@ class BaseUploadable: d.addCallback(_got_size) return d 
+@implementer(IUploadable) class FileHandle(BaseUploadable): - implements(IUploadable) def __init__(self, filehandle, convergence): """ @@ -1510,11 +1510,11 @@ class Data(FileHandle): assert convergence is None or isinstance(convergence, str), (convergence, type(convergence)) FileHandle.__init__(self, StringIO(data), convergence=convergence) +@implementer(IUploader) class Uploader(service.MultiService, log.PrefixingLogMixin): """I am a service that allows file uploading. I am a service-child of the Client. """ - implements(IUploader) name = "uploader" URI_LIT_SIZE_THRESHOLD = 55 diff --git a/src/allmydata/introducer/client.py b/src/allmydata/introducer/client.py index cc3b3dadb..a22c46103 100644 --- a/src/allmydata/introducer/client.py +++ b/src/allmydata/introducer/client.py @@ -1,6 +1,6 @@ import time -from zope.interface import implements +from zope.interface import implementer from twisted.application import service from foolscap.api import Referenceable, eventually from allmydata.interfaces import InsufficientVersionError @@ -18,8 +18,8 @@ class InvalidCacheError(Exception): V2 = "http://allmydata.org/tahoe/protocols/introducer/v2" +@implementer(RIIntroducerSubscriberClient_v2, IIntroducerClient) class IntroducerClient(service.Service, Referenceable): - implements(RIIntroducerSubscriberClient_v2, IIntroducerClient) def __init__(self, tub, introducer_furl, nickname, my_version, oldest_supported, diff --git a/src/allmydata/introducer/server.py b/src/allmydata/introducer/server.py index e999fc098..2ef68d37d 100644 --- a/src/allmydata/introducer/server.py +++ b/src/allmydata/introducer/server.py @@ -1,6 +1,6 @@ import time, os.path, textwrap -from zope.interface import implements +from zope.interface import implementer from twisted.application import service from foolscap.api import Referenceable import allmydata @@ -69,8 +69,8 @@ class IntroducerNode(node.Node): ws = IntroducerWebishServer(self, webport, nodeurl_path, staticdir) self.add_service(ws) +@implementer(RIIntroducerPublisherAndSubscriberService_v2) class IntroducerService(service.MultiService, Referenceable): - implements(RIIntroducerPublisherAndSubscriberService_v2) name = "introducer" # v1 is the original protocol, added in 1.0 (but only advertised starting # in 1.3), removed in 1.12. 
v2 is the new signed protocol, added in 1.10 diff --git a/src/allmydata/monitor.py b/src/allmydata/monitor.py index d17833118..98c463014 100644 --- a/src/allmydata/monitor.py +++ b/src/allmydata/monitor.py @@ -1,5 +1,5 @@ -from zope.interface import Interface, implements +from zope.interface import Interface, implementer from allmydata.util import observer class IMonitor(Interface): @@ -80,8 +80,8 @@ class OperationCancelledError(Exception): pass -class Monitor: - implements(IMonitor) +@implementer(IMonitor) +class Monitor(object): def __init__(self): self.cancelled = False diff --git a/src/allmydata/mutable/filenode.py b/src/allmydata/mutable/filenode.py index 084c28f66..bcb62eb0d 100644 --- a/src/allmydata/mutable/filenode.py +++ b/src/allmydata/mutable/filenode.py @@ -1,7 +1,7 @@ import random -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer, reactor from foolscap.api import eventually from allmydata.interfaces import IMutableFileNode, ICheckable, ICheckResults, \ @@ -48,8 +48,8 @@ class BackoffAgent: # use nodemaker.create_mutable_file() to make one of these -class MutableFileNode: - implements(IMutableFileNode, ICheckable) +@implementer(IMutableFileNode, ICheckable) +class MutableFileNode(object): def __init__(self, storage_broker, secret_holder, default_encoding_parameters, history): @@ -695,7 +695,8 @@ class MutableFileNode: return res -class MutableFileVersion: +@implementer(IMutableFileVersion, IWriteable) +class MutableFileVersion(object): """ I represent a specific version (most likely the best version) of a mutable file. @@ -708,7 +709,6 @@ class MutableFileVersion: overwrite or modify the contents of the mutable file that I reference. """ - implements(IMutableFileVersion, IWriteable) def __init__(self, node, diff --git a/src/allmydata/mutable/layout.py b/src/allmydata/mutable/layout.py index 40fd9331b..dbfde5f68 100644 --- a/src/allmydata/mutable/layout.py +++ b/src/allmydata/mutable/layout.py @@ -7,7 +7,7 @@ from allmydata.interfaces import HASH_SIZE, SALT_SIZE, SDMF_VERSION, \ from allmydata.util import mathutil from twisted.python import failure from twisted.internet import defer -from zope.interface import implements +from zope.interface import implementer # These strings describe the format of the packed structs they help process. @@ -218,8 +218,8 @@ def pack_prefix(seqnum, root_hash, IV, return prefix -class SDMFSlotWriteProxy: - implements(IMutableSlotWriter) +@implementer(IMutableSlotWriter) +class SDMFSlotWriteProxy(object): """ I represent a remote write slot for an SDMF mutable file. I build a share in memory, and then write it in one piece to the remote @@ -566,8 +566,8 @@ VERIFICATION_KEY_SIZE = 292 # bound. Each node requires 2 bytes of node-number plus 32 bytes of hash. SHARE_HASH_CHAIN_SIZE = (2+HASH_SIZE)*mathutil.log_ceil(256, 2) -class MDMFSlotWriteProxy: - implements(IMutableSlotWriter) +@implementer(IMutableSlotWriter) +class MDMFSlotWriteProxy(object): """ I represent a remote write slot for an MDMF mutable file. 
diff --git a/src/allmydata/mutable/publish.py b/src/allmydata/mutable/publish.py index 88e95e6c3..4dbbd832a 100644 --- a/src/allmydata/mutable/publish.py +++ b/src/allmydata/mutable/publish.py @@ -3,7 +3,7 @@ import os, time from StringIO import StringIO from itertools import count -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.python import failure from allmydata.interfaces import IPublishStatus, SDMF_VERSION, MDMF_VERSION, \ @@ -30,8 +30,8 @@ PUSHING_BLOCKS_STATE = 0 PUSHING_EVERYTHING_ELSE_STATE = 1 DONE_STATE = 2 -class PublishStatus: - implements(IPublishStatus) +@implementer(IPublishStatus) +class PublishStatus(object): statusid_counter = count(0) def __init__(self): self.timings = {} @@ -1199,12 +1199,12 @@ class Publish: eventually(self.done_deferred.callback, f) -class MutableFileHandle: +@implementer(IMutableUploadable) +class MutableFileHandle(object): """ I am a mutable uploadable built around a filehandle-like object, usually either a StringIO instance or a handle to an actual file. """ - implements(IMutableUploadable) def __init__(self, filehandle): # The filehandle is defined as a generally file-like object that @@ -1283,13 +1283,13 @@ class MutableData(MutableFileHandle): MutableFileHandle.__init__(self, StringIO(s)) -class TransformingUploadable: +@implementer(IMutableUploadable) +class TransformingUploadable(object): """ I am an IMutableUploadable that wraps another IMutableUploadable, and some segments that are already on the grid. When I am called to read, I handle merging of boundary segments. """ - implements(IMutableUploadable) def __init__(self, data, offset, segment_size, start, end): diff --git a/src/allmydata/mutable/repairer.py b/src/allmydata/mutable/repairer.py index 1ec80ce20..2f9377fc7 100644 --- a/src/allmydata/mutable/repairer.py +++ b/src/allmydata/mutable/repairer.py @@ -1,13 +1,13 @@ -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from allmydata.interfaces import IRepairResults, ICheckResults from allmydata.mutable.publish import MutableData from allmydata.mutable.common import MODE_REPAIR from allmydata.mutable.servermap import ServerMap, ServermapUpdater -class RepairResults: - implements(IRepairResults) +@implementer(IRepairResults) +class RepairResults(object): def __init__(self, smap): self.servermap = smap diff --git a/src/allmydata/mutable/retrieve.py b/src/allmydata/mutable/retrieve.py index 9d3038aed..03dc6f888 100644 --- a/src/allmydata/mutable/retrieve.py +++ b/src/allmydata/mutable/retrieve.py @@ -1,7 +1,7 @@ import time from itertools import count -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.python import failure from twisted.internet.interfaces import IPushProducer, IConsumer @@ -22,8 +22,8 @@ from allmydata.mutable.common import CorruptShareError, BadShareError, \ UncoordinatedWriteError from allmydata.mutable.layout import MDMFSlotReadProxy -class RetrieveStatus: - implements(IRetrieveStatus) +@implementer(IRetrieveStatus) +class RetrieveStatus(object): statusid_counter = count(0) def __init__(self): self.timings = {} @@ -92,13 +92,13 @@ class RetrieveStatus: class Marker: pass -class Retrieve: +@implementer(IPushProducer) +class Retrieve(object): # this class is currently single-use. 
Eventually (in MDMF) we will make # it multi-use, in which case you can call download(range) multiple # times, and each will have a separate response chain. However the # Retrieve object will remain tied to a specific version of the file, and # will use a single ServerMap instance. - implements(IPushProducer) def __init__(self, filenode, storage_broker, servermap, verinfo, fetch_privkey=False, verify=False): diff --git a/src/allmydata/mutable/servermap.py b/src/allmydata/mutable/servermap.py index 84ac1b5a5..273e0f0de 100644 --- a/src/allmydata/mutable/servermap.py +++ b/src/allmydata/mutable/servermap.py @@ -1,6 +1,6 @@ import sys, time, copy -from zope.interface import implements +from zope.interface import implementer from itertools import count from twisted.internet import defer from twisted.python import failure @@ -16,8 +16,8 @@ from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, \ MODE_READ, MODE_REPAIR, CorruptShareError from allmydata.mutable.layout import SIGNED_PREFIX_LENGTH, MDMFSlotReadProxy -class UpdateStatus: - implements(IServermapUpdaterStatus) +@implementer(IServermapUpdaterStatus) +class UpdateStatus(object): statusid_counter = count(0) def __init__(self): self.timings = {} diff --git a/src/allmydata/nodemaker.py b/src/allmydata/nodemaker.py index d0c002461..89fa34a6c 100644 --- a/src/allmydata/nodemaker.py +++ b/src/allmydata/nodemaker.py @@ -1,5 +1,5 @@ import weakref -from zope.interface import implements +from zope.interface import implementer from allmydata.util.assertutil import precondition from allmydata.interfaces import INodeMaker from allmydata.immutable.literal import LiteralFileNode @@ -13,8 +13,8 @@ from allmydata.blacklist import ProhibitedNode from allmydata import uri -class NodeMaker: - implements(INodeMaker) +@implementer(INodeMaker) +class NodeMaker(object): def __init__(self, storage_broker, secret_holder, history, uploader, terminator, diff --git a/src/allmydata/stats.py b/src/allmydata/stats.py index e107c5042..19f10eb38 100644 --- a/src/allmydata/stats.py +++ b/src/allmydata/stats.py @@ -8,15 +8,15 @@ from collections import deque from twisted.internet import reactor from twisted.application import service from twisted.application.internet import TimerService -from zope.interface import implements +from zope.interface import implementer from foolscap.api import eventually, DeadReferenceError, Referenceable, Tub from allmydata.util import log from allmydata.util.encodingutil import quote_local_unicode_path from allmydata.interfaces import RIStatsProvider, RIStatsGatherer, IStatsProducer +@implementer(IStatsProducer) class LoadMonitor(service.MultiService): - implements(IStatsProducer) loop_interval = 1 num_samples = 60 @@ -69,8 +69,8 @@ class LoadMonitor(service.MultiService): return { 'load_monitor.avg_load': avg, 'load_monitor.max_load': m_x, } +@implementer(IStatsProducer) class CPUUsageMonitor(service.MultiService): - implements(IStatsProducer) HISTORY_LENGTH = 15 POLL_INTERVAL = 60 @@ -122,8 +122,8 @@ class CPUUsageMonitor(service.MultiService): return s +@implementer(RIStatsProvider) class StatsProvider(Referenceable, service.MultiService): - implements(RIStatsProvider) def __init__(self, node, gatherer_furl): service.MultiService.__init__(self) @@ -175,8 +175,8 @@ class StatsProvider(Referenceable, service.MultiService): gatherer.callRemoteOnly('provide', self, nickname or '') +@implementer(RIStatsGatherer) class StatsGatherer(Referenceable, service.MultiService): - implements(RIStatsGatherer) poll_interval = 60 diff 
--git a/src/allmydata/storage/immutable.py b/src/allmydata/storage/immutable.py index 280dff397..fd846c63f 100644 --- a/src/allmydata/storage/immutable.py +++ b/src/allmydata/storage/immutable.py @@ -2,7 +2,7 @@ import os, stat, struct, time from foolscap.api import Referenceable -from zope.interface import implements +from zope.interface import implementer from allmydata.interfaces import RIBucketWriter, RIBucketReader from allmydata.util import base32, fileutil, log from allmydata.util.assertutil import precondition @@ -194,8 +194,8 @@ class ShareFile: return space_freed +@implementer(RIBucketWriter) class BucketWriter(Referenceable): - implements(RIBucketWriter) def __init__(self, ss, incominghome, finalhome, max_size, lease_info, canary): self.ss = ss @@ -293,8 +293,8 @@ class BucketWriter(Referenceable): self.ss.bucket_writer_closed(self, 0) +@implementer(RIBucketReader) class BucketReader(Referenceable): - implements(RIBucketReader) def __init__(self, ss, sharefname, storage_index=None, shnum=None): self.ss = ss diff --git a/src/allmydata/storage/server.py b/src/allmydata/storage/server.py index 1de4b22fe..ac5567a30 100644 --- a/src/allmydata/storage/server.py +++ b/src/allmydata/storage/server.py @@ -3,7 +3,7 @@ import os, re, weakref, struct, time from foolscap.api import Referenceable from twisted.application import service -from zope.interface import implements +from zope.interface import implementer from allmydata.interfaces import RIStorageServer, IStatsProducer from allmydata.util import fileutil, idlib, log, time_format import allmydata # for __full_version__ @@ -33,8 +33,8 @@ NUM_RE=re.compile("^[0-9]+$") +@implementer(RIStorageServer, IStatsProducer) class StorageServer(service.MultiService, Referenceable): - implements(RIStorageServer, IStatsProducer) name = 'storage' LeaseCheckerClass = LeaseCheckingCrawler diff --git a/src/allmydata/storage_client.py b/src/allmydata/storage_client.py index 8c3854225..65c65f535 100644 --- a/src/allmydata/storage_client.py +++ b/src/allmydata/storage_client.py @@ -30,7 +30,7 @@ the foolscap-based server implemented in src/allmydata/storage/*.py . import re, time, hashlib -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.application import service @@ -59,8 +59,8 @@ from allmydata.util.hashutil import permute_server_hash # don't pass signatures: only pass validated blessed-objects +@implementer(IStorageBroker) class StorageFarmBroker(service.MultiService): - implements(IStorageBroker) """I live on the client, and know about storage servers. For each server that is participating in a grid, I either maintain a connection to it or remember enough information to establish a connection to it on demand. @@ -235,8 +235,8 @@ class StorageFarmBroker(service.MultiService): return s return StubServer(serverid) -class StubServer: - implements(IDisplayableServer) +@implementer(IDisplayableServer) +class StubServer(object): def __init__(self, serverid): self.serverid = serverid # binary tubid def get_serverid(self): @@ -248,6 +248,7 @@ class StubServer: def get_nickname(self): return "?" +@implementer(IServer) class NativeStorageServer(service.MultiService): """I hold information about a storage server that we want to connect to. 
If we are connected, I hold the RemoteReference, their host address, and @@ -263,7 +264,6 @@ class NativeStorageServer(service.MultiService): @ivar rref: the RemoteReference, if connected, otherwise None @ivar remote_host: the IAddress, if connected, otherwise None """ - implements(IServer) VERSION_DEFAULTS = { "http://allmydata.org/tahoe/protocols/storage/v1" : diff --git a/src/allmydata/test/bench_dirnode.py b/src/allmydata/test/bench_dirnode.py index aecd06ce7..8626faf07 100644 --- a/src/allmydata/test/bench_dirnode.py +++ b/src/allmydata/test/bench_dirnode.py @@ -2,14 +2,14 @@ import hotshot.stats, os, random, sys from pyutil import benchutil, randutil # http://tahoe-lafs.org/trac/pyutil -from zope.interface import implements +from zope.interface import implementer from allmydata import dirnode, uri from allmydata.interfaces import IFileNode from allmydata.mutable.filenode import MutableFileNode from allmydata.immutable.filenode import ImmutableFileNode -class ContainerNode: - implements(IFileNode) +@implementer(IFileNode) +class ContainerNode(object): # dirnodes sit on top of a "container" filenode, from which it extracts a # writekey def __init__(self): diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index 1979023fe..3ad3ef3d1 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -1,5 +1,5 @@ import os, random, struct -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.internet.interfaces import IPullProducer from twisted.python import failure @@ -24,15 +24,15 @@ from allmydata.immutable.upload import Uploader TEST_RSA_KEY_SIZE = 522 -class DummyProducer: - implements(IPullProducer) +@implementer(IPullProducer) +class DummyProducer(object): def resumeProducing(self): pass +@implementer(IImmutableFileNode) class FakeCHKFileNode: """I provide IImmutableFileNode, but all of my data is stored in a class-level dictionary.""" - implements(IImmutableFileNode) def __init__(self, filecap, all_contents): precondition(isinstance(filecap, (uri.CHKFileURI, uri.LiteralFileURI)), filecap) @@ -167,11 +167,11 @@ def create_chk_filenode(contents, all_contents): return n +@implementer(IMutableFileNode, ICheckable) class FakeMutableFileNode: """I provide IMutableFileNode, but all of my data is stored in a class-level dictionary.""" - implements(IMutableFileNode, ICheckable) MUTABLE_SIZELIMIT = 10000 def __init__(self, storage_broker, secret_holder, diff --git a/src/allmydata/test/no_network.py b/src/allmydata/test/no_network.py index 56ccfa5a9..55bd7cdb8 100644 --- a/src/allmydata/test/no_network.py +++ b/src/allmydata/test/no_network.py @@ -14,7 +14,7 @@ # control.furl . 
import os -from zope.interface import implements +from zope.interface import implementer from twisted.application import service from twisted.internet import defer, reactor from twisted.python.failure import Failure @@ -130,8 +130,8 @@ def wrap_storage_server(original): wrapper.version = original.remote_get_version() return wrapper -class NoNetworkServer: - implements(IServer) +@implementer(IServer) +class NoNetworkServer(object): def __init__(self, serverid, rref): self.serverid = serverid self.rref = rref @@ -164,8 +164,8 @@ class NoNetworkServer: def get_version(self): return self.rref.version -class NoNetworkStorageBroker: - implements(IStorageBroker) +@implementer(IStorageBroker) +class NoNetworkStorageBroker(object): def get_servers_for_psi(self, peer_selection_index): def _permuted(server): seed = server.get_permutation_seed() diff --git a/src/allmydata/test/test_dirnode.py b/src/allmydata/test/test_dirnode.py index 69def6406..ef155f8bb 100644 --- a/src/allmydata/test/test_dirnode.py +++ b/src/allmydata/test/test_dirnode.py @@ -2,7 +2,7 @@ import time import unicodedata -from zope.interface import implements +from zope.interface import implementer from twisted.trial import unittest from twisted.internet import defer from twisted.internet.interfaces import IConsumer @@ -27,8 +27,8 @@ from allmydata.nodemaker import NodeMaker from base64 import b32decode import allmydata.test.common_util as testutil -class MemAccum: - implements(IConsumer) +@implementer(IConsumer) +class MemAccum(object): def registerProducer(self, producer, streaming): self.producer = producer self.producer.resumeProducing() @@ -1489,8 +1489,8 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase): dirnode.pack_children, kids, fn.get_writekey(), deep_immutable=True) -class FakeMutableFile: - implements(IMutableFileNode) +@implementer(IMutableFileNode) +class FakeMutableFile(object): counter = 0 def __init__(self, initial_contents=""): data = self._get_initial_contents(initial_contents) diff --git a/src/allmydata/test/test_encode.py b/src/allmydata/test/test_encode.py index c908e9749..4b3bb7a0c 100644 --- a/src/allmydata/test/test_encode.py +++ b/src/allmydata/test/test_encode.py @@ -1,4 +1,4 @@ -from zope.interface import implements +from zope.interface import implementer from twisted.trial import unittest from twisted.internet import defer from twisted.python.failure import Failure @@ -17,8 +17,8 @@ class LostPeerError(Exception): def flip_bit(good): # flips the last bit return good[:-1] + chr(ord(good[-1]) ^ 0x01) -class FakeBucketReaderWriterProxy: - implements(IStorageBucketWriter, IStorageBucketReader) +@implementer(IStorageBucketWriter, IStorageBucketReader) +class FakeBucketReaderWriterProxy(object): # these are used for both reading and writing def __init__(self, mode="good", peerid="peer"): self.mode = mode diff --git a/src/allmydata/unknown.py b/src/allmydata/unknown.py index a8b108eea..6c970e484 100644 --- a/src/allmydata/unknown.py +++ b/src/allmydata/unknown.py @@ -1,5 +1,5 @@ -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from allmydata.interfaces import IFilesystemNode, MustNotBeUnknownRWError, \ MustBeDeepImmutableError @@ -26,8 +26,8 @@ def strip_prefix_for_ro(ro_uri, deep_immutable): else: return ro_uri -class UnknownNode: - implements(IFilesystemNode) +@implementer(IFilesystemNode) +class UnknownNode(object): def __init__(self, given_rw_uri, given_ro_uri, deep_immutable=False, name=u""): diff --git a/src/allmydata/uri.py 
b/src/allmydata/uri.py index 4d6db4829..f4f1b1542 100644 --- a/src/allmydata/uri.py +++ b/src/allmydata/uri.py @@ -1,7 +1,7 @@ import re -from zope.interface import implements +from zope.interface import implementer from twisted.python.components import registerAdapter from allmydata.storage.server import si_a2b, si_b2a @@ -30,7 +30,7 @@ BASE32STR_256bits = '(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits) NUMBER='([0-9]+)' -class _BaseURI: +class _BaseURI(object): def __hash__(self): return self.to_string().__hash__() @@ -50,8 +50,8 @@ class _BaseURI: return self.storage_index +@implementer(IURI, IImmutableFileURI) class CHKFileURI(_BaseURI): - implements(IURI, IImmutableFileURI) BASE_STRING='URI:CHK:' STRING_RE=re.compile('^URI:CHK:'+BASE32STR_128bits+':'+ @@ -109,8 +109,8 @@ class CHKFileURI(_BaseURI): size=self.size) +@implementer(IVerifierURI) class CHKFileVerifierURI(_BaseURI): - implements(IVerifierURI) BASE_STRING='URI:CHK-Verifier:' STRING_RE=re.compile('^URI:CHK-Verifier:'+BASE32STR_128bits+':'+ @@ -158,8 +158,8 @@ class CHKFileVerifierURI(_BaseURI): return self +@implementer(IURI, IImmutableFileURI) class LiteralFileURI(_BaseURI): - implements(IURI, IImmutableFileURI) BASE_STRING='URI:LIT:' STRING_RE=re.compile('^URI:LIT:'+base32.BASE32STR_anybytes+'$') @@ -199,8 +199,8 @@ class LiteralFileURI(_BaseURI): return len(self.data) +@implementer(IURI, IMutableFileURI) class WriteableSSKFileURI(_BaseURI): - implements(IURI, IMutableFileURI) BASE_STRING='URI:SSK:' STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+ @@ -248,8 +248,8 @@ class WriteableSSKFileURI(_BaseURI): return SSKVerifierURI(self.storage_index, self.fingerprint) +@implementer(IURI, IMutableFileURI) class ReadonlySSKFileURI(_BaseURI): - implements(IURI, IMutableFileURI) BASE_STRING='URI:SSK-RO:' STRING_RE=re.compile('^URI:SSK-RO:'+BASE32STR_128bits+':'+BASE32STR_256bits+'$') @@ -295,8 +295,8 @@ class ReadonlySSKFileURI(_BaseURI): return SSKVerifierURI(self.storage_index, self.fingerprint) +@implementer(IVerifierURI) class SSKVerifierURI(_BaseURI): - implements(IVerifierURI) BASE_STRING='URI:SSK-Verifier:' STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'$') @@ -332,8 +332,8 @@ class SSKVerifierURI(_BaseURI): return self +@implementer(IURI, IMutableFileURI) class WriteableMDMFFileURI(_BaseURI): - implements(IURI, IMutableFileURI) BASE_STRING='URI:MDMF:' STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)') @@ -381,8 +381,8 @@ class WriteableMDMFFileURI(_BaseURI): return MDMFVerifierURI(self.storage_index, self.fingerprint) +@implementer(IURI, IMutableFileURI) class ReadonlyMDMFFileURI(_BaseURI): - implements(IURI, IMutableFileURI) BASE_STRING='URI:MDMF-RO:' STRING_RE=re.compile('^' +BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)') @@ -430,8 +430,8 @@ class ReadonlyMDMFFileURI(_BaseURI): return MDMFVerifierURI(self.storage_index, self.fingerprint) +@implementer(IVerifierURI) class MDMFVerifierURI(_BaseURI): - implements(IVerifierURI) BASE_STRING='URI:MDMF-Verifier:' STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)') @@ -468,8 +468,8 @@ class MDMFVerifierURI(_BaseURI): return self +@implementer(IURI, IDirnodeURI) class _DirectoryBaseURI(_BaseURI): - implements(IURI, IDirnodeURI) def __init__(self, filenode_uri=None): self._filenode_uri = filenode_uri @@ -515,8 +515,8 @@ class _DirectoryBaseURI(_BaseURI): return self._filenode_uri.get_storage_index() +@implementer(IDirectoryURI) class 
DirectoryURI(_DirectoryBaseURI): - implements(IDirectoryURI) BASE_STRING='URI:DIR2:' BASE_STRING_RE=re.compile('^'+BASE_STRING) @@ -534,8 +534,8 @@ class DirectoryURI(_DirectoryBaseURI): return ReadonlyDirectoryURI(self._filenode_uri.get_readonly()) +@implementer(IReadonlyDirectoryURI) class ReadonlyDirectoryURI(_DirectoryBaseURI): - implements(IReadonlyDirectoryURI) BASE_STRING='URI:DIR2-RO:' BASE_STRING_RE=re.compile('^'+BASE_STRING) @@ -590,8 +590,8 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI): return None +@implementer(IDirectoryURI) class MDMFDirectoryURI(_DirectoryBaseURI): - implements(IDirectoryURI) BASE_STRING='URI:DIR2-MDMF:' BASE_STRING_RE=re.compile('^'+BASE_STRING) @@ -612,8 +612,8 @@ class MDMFDirectoryURI(_DirectoryBaseURI): return MDMFDirectoryURIVerifier(self._filenode_uri.get_verify_cap()) +@implementer(IReadonlyDirectoryURI) class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI): - implements(IReadonlyDirectoryURI) BASE_STRING='URI:DIR2-MDMF-RO:' BASE_STRING_RE=re.compile('^'+BASE_STRING) @@ -650,8 +650,8 @@ def wrap_dirnode_cap(filecap): raise AssertionError("cannot interpret as a directory cap: %s" % filecap.__class__) +@implementer(IVerifierURI) class MDMFDirectoryURIVerifier(_DirectoryBaseURI): - implements(IVerifierURI) BASE_STRING='URI:DIR2-MDMF-Verifier:' BASE_STRING_RE=re.compile('^'+BASE_STRING) @@ -675,8 +675,8 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI): return self +@implementer(IVerifierURI) class DirectoryURIVerifier(_DirectoryBaseURI): - implements(IVerifierURI) BASE_STRING='URI:DIR2-Verifier:' BASE_STRING_RE=re.compile('^'+BASE_STRING) @@ -700,8 +700,8 @@ class DirectoryURIVerifier(_DirectoryBaseURI): return self +@implementer(IVerifierURI) class ImmutableDirectoryURIVerifier(DirectoryURIVerifier): - implements(IVerifierURI) BASE_STRING='URI:DIR2-CHK-Verifier:' BASE_STRING_RE=re.compile('^'+BASE_STRING) INNER_URI_CLASS=CHKFileVerifierURI diff --git a/src/allmydata/util/connection_status.py b/src/allmydata/util/connection_status.py index 2ea2b0cab..7cc2791b2 100644 --- a/src/allmydata/util/connection_status.py +++ b/src/allmydata/util/connection_status.py @@ -3,7 +3,7 @@ from zope.interface import implementer from ..interfaces import IConnectionStatus @implementer(IConnectionStatus) -class ConnectionStatus: +class ConnectionStatus(object): def __init__(self, connected, summary, non_connected_statuses, last_connection_time, last_received_time): self.connected = connected diff --git a/src/allmydata/util/consumer.py b/src/allmydata/util/consumer.py index af0a04ad7..c41115e5e 100644 --- a/src/allmydata/util/consumer.py +++ b/src/allmydata/util/consumer.py @@ -3,11 +3,11 @@ a filenode's read() method. See download_to_data() for an example of its use. 
""" -from zope.interface import implements +from zope.interface import implementer from twisted.internet.interfaces import IConsumer -class MemoryConsumer: - implements(IConsumer) +@implementer(IConsumer) +class MemoryConsumer(object): def __init__(self, progress=None): self.chunks = [] diff --git a/src/allmydata/web/directory.py b/src/allmydata/web/directory.py index 88d4a5068..2b6084a3b 100644 --- a/src/allmydata/web/directory.py +++ b/src/allmydata/web/directory.py @@ -2,7 +2,7 @@ import simplejson import urllib -from zope.interface import implements +from zope.interface import implementer from twisted.internet import defer from twisted.internet.interfaces import IPushProducer from twisted.python.failure import Failure @@ -1087,8 +1087,8 @@ class DeepStatsResults(rend.Page): s["finished"] = self.monitor.is_finished() return simplejson.dumps(s, indent=1) +@implementer(IPushProducer) class ManifestStreamer(dirnode.DeepStats): - implements(IPushProducer) def __init__(self, ctx, origin): dirnode.DeepStats.__init__(self, origin) @@ -1144,8 +1144,8 @@ class ManifestStreamer(dirnode.DeepStats): self.req.write(j+"\n") return "" +@implementer(IPushProducer) class DeepCheckStreamer(dirnode.DeepStats): - implements(IPushProducer) def __init__(self, ctx, origin, verify, repair, add_lease): dirnode.DeepStats.__init__(self, origin) diff --git a/src/allmydata/web/operations.py b/src/allmydata/web/operations.py index 8ee9f2027..21e43c9c4 100644 --- a/src/allmydata/web/operations.py +++ b/src/allmydata/web/operations.py @@ -1,6 +1,6 @@ import time -from zope.interface import implements +from zope.interface import implementer from nevow import rend, url, tags as T from nevow.inevow import IRequest from twisted.python.failure import Failure @@ -18,8 +18,8 @@ DAY = 24*HOUR (MONITOR, RENDERER, WHEN_ADDED) = range(3) +@implementer(IOpHandleTable) class OphandleTable(rend.Page, service.Service): - implements(IOpHandleTable) UNCOLLECTED_HANDLE_LIFETIME = 4*DAY COLLECTED_HANDLE_LIFETIME = 1*DAY